diff --git a/.circleci/config.yml b/.circleci/config.yml
deleted file mode 100644
index d233e6a..0000000
--- a/.circleci/config.yml
+++ /dev/null
@@ -1,208 +0,0 @@
-version: 2
-jobs:
-  build:
-    working_directory: /root
-    docker:
-      - image: aarondl0/sqlboiler-test:latest
-
-      - image: postgres:9.6
-        environment:
-          POSTGRES_PASSWORD: psqlpassword
-
-      - image: mysql:5.7
-        environment:
-          MYSQL_ROOT_PASSWORD: mysqlpassword
-
-      - image: microsoft/mssql-server-linux:ctp1-4
-        environment:
-          ACCEPT_EULA: 'Y'
-          SA_PASSWORD: 'R@@tr@@t1234'
-
-    environment:
-      GOPATH: /go
-      ROOTPATH: /go/src/github.com/vattle/sqlboiler
-
-    steps:
-      - run:
-          name: Add PSQL Creds
-          command: |
-            echo "*:*:*:*:psqlpassword" > /root/.pgpass
-            chmod 600 /root/.pgpass
-      - run:
-          name: Add MySQL Creds
-          command: |
-            echo -e "[client]\nuser = root\npassword = mysqlpassword\nhost = localhost\nprotocol = tcp" > /root/.my.cnf
-            chmod 600 /root/.my.cnf
-
-      - run:
-          name: Wait for PSQL
-          command: >
-            for i in `seq 30`; do
-              echo "Waiting for psql"
-              set +o errexit
-              psql --host localhost --username postgres --dbname template1 -c 'select * from information_schema.tables;' > /dev/null
-              status=$?
-              set -o errexit
-              if [ $status -eq 0 ]; then
-                break
-              fi
-              if [ $i -eq 30 ]; then
-                echo "Failed to wait for psql"
-                exit 1
-              fi
-              sleep 1
-            done
-
-      - run:
-          name: Wait for MySQL
-          command: >
-            for i in `seq 30`; do
-              echo "Waiting for mysql"
-              set +o errexit
-              mysql --execute 'select * from information_schema.tables;' > /dev/null
-              status=$?
-              set -o errexit
-              if [ $status -eq 0 ]; then
-                break
-              fi
-              if [ $i -eq 30 ]; then
-                echo "Failed to wait for mysql"
-                exit 1
-              fi
-              sleep 1
-            done
-
-      - run:
-          name: Wait for MSSQL
-          command: >
-            for i in `seq 30`; do
-              echo "Waiting for mssql"
-              set +o errexit
-              sqlcmd -H localhost -U sa -P R@@tr@@t1234 -Q "select * from information_schema.tables;" > /dev/null
-              status=$?
-              set -o errexit
-              if [ $status -eq 0 ]; then
-                break
-              fi
-              if [ $i -eq 30 ]; then
-                echo "Failed to wait for mssql"
-                exit 1
-              fi
-              sleep 1
-            done
-
-      - run:
-          name: Make GOPATH
-          command: mkdir -p /go/src/github.com/vattle/sqlboiler
-
-      - checkout:
-          path: /go/src/github.com/vattle/sqlboiler
-
-      - run:
-          name: Create PSQL DB
-          command: |
-            createdb --host localhost --username postgres --owner postgres sqlboiler
-            psql --host localhost --username postgres --dbname sqlboiler < $ROOTPATH/testdata/postgres_test_schema.sql
-      - run:
-          name: Create MySQL DB
-          command: |
-            mysql --host localhost --execute 'create database sqlboiler;'
-            mysql --host localhost --database sqlboiler < $ROOTPATH/testdata/mysql_test_schema.sql
-      - run:
-          name: Create MSSQL DB
-          command: |
-            sqlcmd -S localhost -U sa -P R@@tr@@t1234 -Q "create database sqlboiler;"
-            sqlcmd -S localhost -U sa -P R@@tr@@t1234 -d sqlboiler -i $ROOTPATH/testdata/mssql_test_schema.sql
-
-      - run:
-          name: Build SQLBoiler
-          command: |
-            cd $ROOTPATH; go get -v -t
-            cd $ROOTPATH; go build -v github.com/vattle/sqlboiler
-
-      - run:
-          name: 'Configure SQLBoiler: PSQL'
-          command: echo -e '[postgres]\nhost="localhost"\nport=5432\nuser="postgres"\npass="psqlpassword"\ndbname="sqlboiler"\nsslmode="disable"\n' > $ROOTPATH/sqlboiler.toml
-      - run:
-          name: 'Configure SQLBoiler: MySQL'
-          command: echo -e '[mysql]\nhost="localhost"\nport=3306\nuser="root"\npass="mysqlpassword"\ndbname="sqlboiler"\nsslmode="false"\n' >> $ROOTPATH/sqlboiler.toml
-      - run:
-          name: 'Configure SQLBoiler: MSSQL'
-          command: echo -e '[mssql]\nhost="localhost"\nport=1433\nuser="sa"\npass="R@@tr@@t1234"\ndbname="sqlboiler"\nsslmode="disable"\n' >> $ROOTPATH/sqlboiler.toml
-
-      - run:
-          name: 'Generate: PSQL'
-          command: cd $ROOTPATH; ./sqlboiler -o postgres postgres
-      - run:
-          name: 'Generate: MySQL'
-          command: cd $ROOTPATH; ./sqlboiler -o mysql mysql
-      - run:
-          name: 'Generate: MSSQL'
-          command: cd $ROOTPATH; ./sqlboiler -o mssql mssql
-
-      - run:
-          name: Download generated and test deps
-          command: |
-            cd $ROOTPATH
-            go get -v -t ./...
-
-      - run:
-          name: Run Tests
-          command: |
-            cd $ROOTPATH
-            cp ./testdata/mssql_test_schema.sql mssql/tables_schema.sql
-            go test -v -race ./... | tee test_out.txt
-
-      - run:
-          name: Convert test output to JUNIT
-          command: |
-            mkdir -p $HOME/test_results/go
-            cat $ROOTPATH/test_out.txt | go-junit-report > $HOME/test_results/go/out.xml
-
-      - store_test_results:
-          path: test_results
-#test:
-#  pre:
-#    - echo -e "[postgres]\nhost=\"localhost\"\nport=5432\nuser=\"ubuntu\"\ndbname=\"sqlboiler\"\n" > sqlboiler.toml
-#    - createdb -U ubuntu sqlboiler
-#    - psql -U ubuntu sqlboiler < ./testdata/postgres_test_schema.sql
-#
-#    - echo -e "[mysql]\nhost=\"localhost\"\nport=3306\nuser=\"ubuntu\"\ndbname=\"sqlboiler\"\nsslmode=\"false\"\n" >> sqlboiler.toml
-#    - echo "create database sqlboiler;" | mysql -u ubuntu
-#    - mysql -u ubuntu sqlboiler < ./testdata/mysql_test_schema.sql
-#
-#    - echo -e "[mssql]\nhost=\"localhost\"\nport=1433\nuser=\"sa\"\ndbname=\"sqlboiler\"\nsslmode=\"disable\"\n" >> sqlboiler.toml
-#    - docker run -e 'ACCEPT_EULA=Y' -e 'SA_PASSWORD=R@@tr@@t1234' -p 1433:1433 -d --name mssql microsoft/mssql-server-linux
-#    - sqlcmd -S localhost -U sa -P R@@tr@@t1234 -Q "create database sqlboiler;"
-#    - sqlcmd -S localhost -U sa -P R@@tr@@t1234 -d sqlboiler -i ./testdata/mssql_test_schema.sql
-#
-#    - ./sqlboiler -o postgres postgres
-#    - ./sqlboiler -o mysql    mysql
-#    - ./sqlboiler -o mssql    mssql
-#    - cp ./testdata/mssql_test_schema.sql mssql/tables_schema.sql
-#  override:
-#    - go test -v -race ./... > $CIRCLE_ARTIFACTS/gotest.txt
-#  post:
-#    - cat $CIRCLE_ARTIFACTS/gotest.txt | go-junit-report > $CIRCLE_TEST_REPORTS/junit.xml
-#
-#machine:
-#  environment:
-#    GODIST: go1.7.linux-amd64.tar.gz
-#    PATH: /home/ubuntu/.go_workspace/bin:/usr/local/go/bin:/home/ubuntu/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:/opt/mssql-tools/bin
-#  post:
-#    - mkdir -p download
-#    - test -e download/$GODIST || curl -o download/$GODIST https://storage.googleapis.com/golang/$GODIST
-#    - sudo rm -rf /usr/local/go
-#    - sudo tar -C /usr/local -xzf download/$GODIST
-#
-#dependencies:
-#  pre:
-#    - mkdir -p /home/ubuntu/.go_workspace/src/github.com/jstemmer
-#    - go get -u github.com/jstemmer/go-junit-report
-#
-#    - curl https://packages.microsoft.com/keys/microsoft.asc | sudo apt-key add -
-#    - curl https://packages.microsoft.com/config/ubuntu/14.04/prod.list | sudo tee /etc/apt/sources.list.d/msprod.list
-#    - sudo apt-get update; sudo apt-get install mssql-tools unixodbc-dev
-#    - docker pull microsoft/mssql-server-linux
-#  cache_directories:
-#    - ~/download
diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md
deleted file mode 100644
index 062a49c..0000000
--- a/.github/ISSUE_TEMPLATE.md
+++ /dev/null
@@ -1,18 +0,0 @@
-If you're having a generation problem, please answer these questions before submitting your issue. Thanks!
-
-### What version of SQLBoiler are you using (`sqlboiler --version`)?
-
-
-### If this happened at generation time, what was the full SQLBoiler command you used to generate your models? (if not applicable, leave blank)
-
-
-### If this happened at runtime, what code produced the issue? (if not applicable, leave blank)
-
-
-### What is the output of the command above with the `-d` flag added to it? (Provided you are comfortable sharing this, it contains a blueprint of your schema)
-
-
-### Please provide a relevant database schema so we can replicate your issue (Provided you are comfortable sharing this)
-
-
-### Further information. What did you do, what did you expect?
diff --git a/.gitignore b/.gitignore
index a7dc0b1..8f7858d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,4 +4,3 @@ sqlboiler.toml
 models/
 testschema.sql
 .cover
-/.idea
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
deleted file mode 100644
index 8ce4412..0000000
--- a/CONTRIBUTING.md
+++ /dev/null
@@ -1,60 +0,0 @@
-# Contributing
-
-Thanks for your interest in contributing to SQLBoiler!
-
-We have a very lightweight process and aim to keep it that way.
-Read the sections for the piece you're interested in and go from
-there.
-
-If you need quick communication we're usually on [Slack](https://sqlboiler.from-the.cloud).
-
-# New Code / Features
-
-## Small Change
-
-#### TLDR
-
-1. Open PR against **dev** branch with explanation
-1. Participate in Github Code Review
-
-#### Long version
-
-For code that requires little to no discussion, please just open a pull request with some
-explanation against the **dev** branch. All code goes through dev before going out in a release.
-
-## Bigger Change
-
-#### TLDR
-
-1. Start proposal of idea in Github issue
-1. After design consensus, open a PR with the work against the **dev** branch
-1. Participate in Github Code Review
-
-#### Long version
-
-If however you're working on something bigger, it's usually better to check with us on the idea
-before starting on a pull request, just so there's no time wasted in redoing/refactoring or being
-outright rejected because the PR is at odds with the design. The best way to accomplish this is to
-open an issue to discuss it. It can always start as a Slack conversation but should eventually end
-up as an issue to avoid penalizing the rest of the users for not being on Slack. Once we agree on
-the way to do something, then open the PR against the **dev** branch and we'll commence code review
-with the Github code review tools. Then it will be merged into dev, and later go out in a release.
-
-# Bugs
-
-Issues should be filed on Github; simply use the template provided and fill in the details. If there's
-more information you feel you should give, use your best judgement and add it in; the more the better.
-See the section below for information on providing database schemas.
-
-Bugs that have responses from contributors but no further action from those who opened them will,
-after a time, be closed with the comment: "Stale".
-
-## Schemas
-
-A database schema can help us fix generation issues very quickly. However, not everyone is willing to part
-with their database schema for various reasons, and that's fine. Instead of the full schema, please
-provide a subset of your database (you can munge the names so they're unrecognizable) that can
-help us reproduce the problem.
-
-*Note:* Your schema information is included in the output from `--debug`, so be careful giving this
-information out publicly on a Github issue if you're sensitive about this.
diff --git a/README.md b/README.md
index 51b5fbe..50a316a 100644
--- a/README.md
+++ b/README.md
@@ -1,10 +1,7 @@
-![sqlboiler logo](http://i.imgur.com/NJtCT7y.png)
+# SQLBoiler
 
 [![License](https://img.shields.io/badge/license-BSD-blue.svg)](https://github.com/vattle/sqlboiler/blob/master/LICENSE)
 [![GoDoc](https://godoc.org/github.com/vattle/sqlboiler?status.svg)](https://godoc.org/github.com/vattle/sqlboiler)
-[![Mail](https://img.shields.io/badge/mail%20list-sqlboiler-lightgrey.svg)](https://groups.google.com/a/volatile.tech/forum/#!forum/sqlboiler)
-[![Mail-Annc](https://img.shields.io/badge/mail%20list-sqlboiler--announce-lightgrey.svg)](https://groups.google.com/a/volatile.tech/forum/#!forum/sqlboiler-announce)
-[![Slack](https://img.shields.io/badge/slack-%23general-lightgrey.svg)](https://sqlboiler.from-the.cloud)
 [![CircleCI](https://circleci.com/gh/vattle/sqlboiler.svg?style=shield)](https://circleci.com/gh/vattle/sqlboiler)
 [![Go Report Card](https://goreportcard.com/badge/vattle/sqlboiler)](http://goreportcard.com/report/vattle/sqlboiler)
 
@@ -51,8 +48,6 @@ Table of Contents
         * [Download](#download)
         * [Configuration](#configuration)
         * [Initial Generation](#initial-generation)
-        * [Regeneration](#regeneration)
-        * [Extending Generated Models](#extending-generated-models)
     * [Diagnosing Problems](#diagnosing-problems)
     * [Features &amp; Examples](#features--examples)
       * [Automatic CreatedAt/UpdatedAt](#automatic-createdatupdatedat)
@@ -109,12 +104,9 @@ Table of Contents
 
 - PostgreSQL
 - MySQL
-- Microsoft SQL Server
 
 *Note: Seeking contributors for other database engines.*
 
-*Microsoft SQL Server: Limit with offset support only for SQL Server 2012 and above.*
-
 ### A Small Taste
 
 For a comprehensive list of available operations and examples please see [Features & Examples](#features--examples).
@@ -264,28 +256,20 @@ not to pass them through the command line or environment variables:
 Example:
 
 ```toml
-blacklist=["migrations", "other"]
-schema="myschema"
 [postgres]
-  dbname="dbname"
-  host="localhost"
-  port=5432
-  user="dbusername"
-  pass="dbpassword"
+dbname="dbname"
+host="localhost"
+port=5432
+user="dbusername"
+pass="dbpassword"
+
 [mysql]
-  dbname="dbname"
-  host="localhost"
-  port=3306
-  user="dbusername"
-  pass="dbpassword"
-  sslmode="false"
-[mssql]
-  dbname="dbname"
-  host="localhost"
-  port=1433
-  user="dbusername"
-  pass="dbpassword"
-  sslmode="disable"
+dbname="dbname"
+host="localhost"
+port=3306
+user="dbusername"
+pass="dbpassword"
+sslmode="false"
 ```
 
 #### Initial Generation
@@ -332,119 +316,6 @@ sqlboiler -b goose_migrations postgres
 go test ./models
 ```
 
-*Note: No `mysqldump` or `pg_dump` equivalent for Microsoft SQL Server, so generated tests must be supplemented by `tables_schema.sql` with `CREATE TABLE ...` queries*
-
-
-You can use `go generate` for SQLBoiler if you want to make it easy to
-run the command.
-
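-For example, here's a minimal sketch (assuming the `sqlboiler` binary is on your
-`PATH` and a `sqlboiler.toml` is in the working directory; the file name is
-hypothetical, and `postgres` should be swapped for your driver):
-
-```go
-// gen.go (hypothetical): keep this file outside the generated output folder.
-//go:generate sqlboiler postgres
-package main
-```
-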
-It's important to not modify anything in the output folder, which brings us to
-the next topic: regeneration.
-
-#### Regeneration
-
-When regenerating the models, it's recommended that you completely delete the
-generated directory in a build script or use the `--wipe` flag in SQLBoiler.
-The reason for this is that sqlboiler doesn't try to diff your files in any
-smart way; it simply writes the files it's going to write, whether they're there
-or not, and doesn't delete any files that were added by you or by previous runs of
-SQLBoiler. In the best case this can cause compilation errors; in the worst case
-it may leave extraneous and unusable code that was generated against tables
-that are no longer in the database.
-
-The bottom line is that this tool should always produce the same result from
-the same source. And the intention is to always regenerate from a pure state.
-The only reason the `--wipe` flag isn't on by default is that we don't
-like programs that `rm -rf` things on the filesystem without being asked to.
-
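-Building on the `go generate` directive shown above, here is a minimal sketch of
-wiring `--wipe` into regeneration (hypothetical file name; adjust the driver to
-match your database):
-
-```go
-// gen.go (hypothetical): lives outside the generated models directory so it
-// survives the wipe. `go generate ./...` deletes and regenerates the models.
-//go:generate sqlboiler --wipe postgres
-package main
-```
-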
-#### Extending generated models
-
-There will probably come a time when you want to extend the generated models
-with some kind of helper functions. A general guideline is to put your
-extension functions into a separate package so that they aren't
-accidentally deleted when regenerating. Past that, there are 3 main ways to
-extend the models; the first is the most desirable:
-
-**Method 1: Simple Functions**
-
-```go
-// Package modext is for SQLBoiler helper methods
-package modext
-
-// UserFirstTimeSetup is an extension of the user model.
-func UserFirstTimeSetup(db *sql.DB, u *models.User) error { ... }
-```
-
-Code organization is accomplished by using multiple files, and everything
-is passed as a parameter so these kinds of methods are very easy to test.
-
-Calling code is also very straightforward:
-
-```go
-user, err := Users(db).One()
-// elided error check
-
-err = modext.UserFirstTimeSetup(db, user)
-// elided error check
-```
-
-**Method 2: Empty struct methods**
-
-The above is the best way to code extensions for SQLBoiler; however, there may
-be times when the number of methods grows too large and code completion is
-no longer as helpful. In these cases you may consider structuring the code
-like this:
-
-```go
-// Package modext is for SQLBoiler helper methods
-package modext
-
-type users struct {}
-
-var Users = users{}
-
-// FirstTimeSetup is an extension of the user model.
-func (users) FirstTimeSetup(db *sql.DB, u *models.User) error { ... }
-```
-
-Calling code then looks a little bit different:
-
-```go
-user, err := Users(db).One()
-// elided error check
-
-err = modext.Users.FirstTimeSetup(db, user)
-// elided error check
-```
-
-This is almost identical to the method above, but gives slightly more
-organization at virtually no runtime cost. It is, however, not as desirable
-as the first method, since it does carry a small runtime cost while offering
-little extra benefit over it.
-
-**Method 3: Embedding**
-
-This pattern is not for the faint of heart: whatever it provides in benefits, it
-more than makes up for in downsides. It's possible to embed the SQLBoiler
-structs inside your own to enhance them. However, it's subject to easy breakages
-and creates a dependency on these additional objects. It can also introduce
-inconsistencies: some objects may have no extended functionality and therefore
-no reason to be embedded, so you either define a wrapper struct for every
-generated struct even if it's empty, or accept inconsistencies, with some places
-using the enhanced model and some not.
-
-```go
-user, err := Users(db).One()
-// elided error check
-
-enhUser := modext.User{user}
-err = enhUser.FirstTimeSetup(db)
-// elided error check
-```
-
-I don't recommend this pattern, but included it so that people know it's an
-option and also know the problems with it.
-
 ## Diagnosing Problems
 
 The most common causes of problems and panics are:
@@ -470,7 +341,7 @@ Most examples in this section will be demonstrated using the following Postgres
 ```sql
 CREATE TABLE pilots (
   id integer NOT NULL,
-  name text NOT NULL
+  name text NOT NULL,
 );
 
 ALTER TABLE pilots ADD CONSTRAINT pilot_pkey PRIMARY KEY (id);
@@ -480,11 +351,11 @@ CREATE TABLE jets (
   pilot_id integer NOT NULL,
   age integer NOT NULL,
   name text NOT NULL,
-  color text NOT NULL
+  color text NOT NULL,
 );
 
 ALTER TABLE jets ADD CONSTRAINT jet_pkey PRIMARY KEY (id);
-ALTER TABLE jets ADD CONSTRAINT jet_pilots_fkey FOREIGN KEY (pilot_id) REFERENCES pilots(id);
+ALTER TABLE jets ADD CONSTRAINT pilots_fkey FOREIGN KEY (pilot_id) REFERENCES pilots(id);
 
 CREATE TABLE languages (
   id integer NOT NULL,
@@ -501,8 +372,8 @@ CREATE TABLE pilot_languages (
 
 -- Composite primary key
 ALTER TABLE pilot_languages ADD CONSTRAINT pilot_language_pkey PRIMARY KEY (pilot_id, language_id);
-ALTER TABLE pilot_languages ADD CONSTRAINT pilot_language_pilots_fkey FOREIGN KEY (pilot_id) REFERENCES pilots(id);
-ALTER TABLE pilot_languages ADD CONSTRAINT pilot_language_languages_fkey FOREIGN KEY (language_id) REFERENCES languages(id);
+ALTER TABLE pilot_languages ADD CONSTRAINT pilots_fkey FOREIGN KEY (pilot_id) REFERENCES pilots(id);
+ALTER TABLE pilot_languages ADD CONSTRAINT languages_fkey FOREIGN KEY (language_id) REFERENCES languages(id);
 ```
 
 The generated model structs for this schema look like the following. Note that we've included the relationship
@@ -686,9 +557,6 @@ err := pilot.Delete(db) // Regular variant, takes a db handle (boil.Executor int
 pilot.DeleteP(db)       // Panic variant, takes a db handle and panics on error.
 err := pilot.DeleteG()  // Global variant, uses the globally set db handle (boil.SetDB()).
 pilot.DeleteGP()        // Global&Panic variant, combines the global db handle and panic on error.
-
-db.Begin()              // Normal sql package way of creating a transaction
-boil.Begin()            // Uses the global database handle set by boil.SetDB()
 ```
 
 Note that it's slightly different for query building.
@@ -956,16 +824,15 @@ it with the `AddModelHook` method. Here is an example of a before insert hook:
 
 ```go
 // Define my hook function
-func myHook(exec boil.Executor, p *Pilot) error {
+func myHook(exec boil.Executor, p *Pilot) {
   // Do stuff
-  return nil
 }
 
 // Register my before insert hook for pilots
 models.AddPilotHook(boil.BeforeInsertHook, myHook)
 ```
 
-Your `ModelHook` will always be defined as `func(boil.Executor, *Model) error`
+Your `ModelHook` will always be defined as `func(boil.Executor, *Model)`
 
 ### Transactions
 
@@ -989,10 +856,6 @@ tx.Commit()
 tx.Rollback()
 ```
 
-It's also worth noting that there's a way to take advantage of `boil.SetDB()`
-by using the [boil.Begin()](https://godoc.org/github.com/vattle/sqlboiler/boil#Begin) function.
-This opens a transaction using the globally stored database.
-
 ### Debug Logging
 
 Debug logging will print your generated SQL statement and the arguments it is using.
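 
 As a minimal sketch, assuming the global `boil.DebugMode` switch (check the
 `boil` package in your version for the exact name):
 
 ```go
 // Print every generated query and its arguments while developing.
 boil.DebugMode = true
 ```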
@@ -1269,78 +1132,51 @@ generator is located at: https://github.com/vattle/sqlboiler
 
 If you'd like to run the benchmarks yourself check out our [boilbench](https://github.com/vattle/boilbench) repo.
 
-```bash
-go test -bench . -benchmem
+Here are the results (lower is better):
+
+`go test -bench . -benchmem`
+```
+BenchmarkGORMDelete/gorm-8     	    100000	     15364 ns/op	    5395 B/op	     113 allocs/op
+BenchmarkGORPDelete/gorp-8     	   1000000	      1703 ns/op	     304 B/op	      12 allocs/op
+BenchmarkXORMDelete/xorm-8     	    100000	     14733 ns/op	    3634 B/op	     107 allocs/op
+BenchmarkBoilDelete/boil-8     	   2000000	       986 ns/op	     120 B/op	       7 allocs/op
+
+BenchmarkGORMInsert/gorm-8     	    100000	     19197 ns/op	    8054 B/op	     161 allocs/op
+BenchmarkGORPInsert/gorp-8     	    500000	      3413 ns/op	    1008 B/op	      32 allocs/op
+BenchmarkXORMInsert/xorm-8     	    100000	     15428 ns/op	    5836 B/op	     131 allocs/op
+BenchmarkBoilInsert/boil-8     	    500000	      3041 ns/op	     568 B/op	      21 allocs/op
+
+BenchmarkGORMSelectAll/gorm-8  	     20000	     85422 ns/op	   29912 B/op	     511 allocs/op
+BenchmarkGORPSelectAll/gorp-8  	     50000	     35824 ns/op	    8837 B/op	     312 allocs/op
+BenchmarkXORMSelectAll/xorm-8  	     30000	     58843 ns/op	   13805 B/op	     298 allocs/op
+BenchmarkBoilSelectAll/boil-8  	    100000	     13844 ns/op	    2840 B/op	      61 allocs/op
+
+BenchmarkGORMSelectSubset/gorm-8     10000	    100714 ns/op	   30875 B/op	     517 allocs/op
+BenchmarkGORPSelectSubset/gorp-8     30000	     43547 ns/op	    8837 B/op	     312 allocs/op
+BenchmarkXORMSelectSubset/xorm-8     30000	     48128 ns/op	   12989 B/op	     282 allocs/op
+BenchmarkBoilSelectSubset/boil-8    100000	     12316 ns/op	    2977 B/op	      65 allocs/op
+
+BenchmarkGORMSelectComplex/gorm-8    10000	    133598 ns/op	   49398 B/op	     772 allocs/op
+BenchmarkGORPSelectComplex/gorp-8    50000	     40588 ns/op	    9037 B/op	     321 allocs/op
+BenchmarkXORMSelectComplex/xorm-8    30000	     56367 ns/op	   14174 B/op	     313 allocs/op
+BenchmarkBoilSelectComplex/boil-8   100000	     16941 ns/op	    3821 B/op	      95 allocs/op
+
+BenchmarkGORMUpdate/gorm-8           50000	     25406 ns/op	    9710 B/op	     195 allocs/op
+BenchmarkGORPUpdate/gorp-8          300000	      3614 ns/op	    1152 B/op	      34 allocs/op
+BenchmarkXORMUpdate/xorm-8          100000	     17510 ns/op	    4458 B/op	     132 allocs/op
+BenchmarkBoilUpdate/boil-8          500000	      2958 ns/op	     520 B/op	      16 allocs/op
+
+BenchmarkGORMRawBind/gorm-8    	     10000	    112577 ns/op	   38270 B/op	     595 allocs/op
+BenchmarkGORPRawBind/gorp-8    	     30000	     40967 ns/op	    8837 B/op	     312 allocs/op
+BenchmarkXORMRawBind/xorm-8    	     30000	     54739 ns/op	   12692 B/op	     273 allocs/op
+BenchmarkSQLXRawBind/sqlx-8    	    200000	     13537 ns/op	    4268 B/op	      49 allocs/op
+BenchmarkBoilRawBind/boil-8    	    200000	     11144 ns/op	    4334 B/op	      49 allocs/op
 ```
 
-### Results (lower is better)
-
-Test machine:
-```text
-OS:  Ubuntu 16.04
-CPU: Intel(R) Core(TM) i7-4771 CPU @ 3.50GHz
-Mem: 16GB
-Go:  go version go1.8.1 linux/amd64
-```
-
-The graphs below have many runs like this as input to calculate errors. Here
-is a sample run:
-
-```text
-BenchmarkGORMSelectAll/gorm-8         20000   66500 ns/op   28998 B/op    455 allocs/op
-BenchmarkGORPSelectAll/gorp-8         50000   31305 ns/op    9141 B/op    318 allocs/op
-BenchmarkXORMSelectAll/xorm-8         20000   66074 ns/op   16317 B/op    417 allocs/op
-BenchmarkKallaxSelectAll/kallax-8    100000   18278 ns/op    7428 B/op    145 allocs/op
-BenchmarkBoilSelectAll/boil-8        100000   12759 ns/op    3145 B/op     67 allocs/op
-
-BenchmarkGORMSelectSubset/gorm-8      20000    69469 ns/op   30008 B/op   462 allocs/op
-BenchmarkGORPSelectSubset/gorp-8      50000    31102 ns/op    9141 B/op   318 allocs/op
-BenchmarkXORMSelectSubset/xorm-8      20000    64151 ns/op   15933 B/op   414 allocs/op
-BenchmarkKallaxSelectSubset/kallax-8 100000    16996 ns/op    6499 B/op   132 allocs/op
-BenchmarkBoilSelectSubset/boil-8     100000    13579 ns/op    3281 B/op    71 allocs/op
-
-BenchmarkGORMSelectComplex/gorm-8     20000    76284 ns/op   34566 B/op   521 allocs/op
-BenchmarkGORPSelectComplex/gorp-8     50000    31886 ns/op    9501 B/op   328 allocs/op
-BenchmarkXORMSelectComplex/xorm-8     20000    68430 ns/op   17694 B/op   464 allocs/op
-BenchmarkKallaxSelectComplex/kallax-8 50000    26095 ns/op   10293 B/op   212 allocs/op
-BenchmarkBoilSelectComplex/boil-8    100000    16403 ns/op    4205 B/op   102 allocs/op
-
-BenchmarkGORMDelete/gorm-8           200000    10356 ns/op    5059 B/op    98 allocs/op
-BenchmarkGORPDelete/gorp-8          1000000     1335 ns/op     352 B/op    13 allocs/op
-BenchmarkXORMDelete/xorm-8           200000    10796 ns/op    4146 B/op   122 allocs/op
-BenchmarkKallaxDelete/kallax-8       300000     5141 ns/op    2241 B/op    48 allocs/op
-BenchmarkBoilDelete/boil-8          2000000      796 ns/op     168 B/op     8 allocs/op
-
-BenchmarkGORMInsert/gorm-8           100000    15238 ns/op    8278 B/op   150 allocs/op
-BenchmarkGORPInsert/gorp-8           300000     4648 ns/op    1616 B/op    38 allocs/op
-BenchmarkXORMInsert/xorm-8           100000    12600 ns/op    6092 B/op   138 allocs/op
-BenchmarkKallaxInsert/kallax-8       100000    15115 ns/op    6003 B/op   126 allocs/op
-BenchmarkBoilInsert/boil-8          1000000     2249 ns/op     984 B/op    23 allocs/op
-
-BenchmarkGORMUpdate/gorm-8           100000    18609 ns/op    9389 B/op   174 allocs/op
-BenchmarkGORPUpdate/gorp-8           500000     3180 ns/op    1536 B/op    35 allocs/op
-BenchmarkXORMUpdate/xorm-8           100000    13149 ns/op    5098 B/op   149 allocs/op
-BenchmarkKallaxUpdate/kallax-8       100000    22880 ns/op   11366 B/op   219 allocs/op
-BenchmarkBoilUpdate/boil-8          1000000     1810 ns/op     936 B/op    18 allocs/op
-
-BenchmarkGORMRawBind/gorm-8           20000    65821 ns/op   30502 B/op   444 allocs/op
-BenchmarkGORPRawBind/gorp-8           50000    31300 ns/op    9141 B/op   318 allocs/op
-BenchmarkXORMRawBind/xorm-8           20000    62024 ns/op   15588 B/op   403 allocs/op
-BenchmarkKallaxRawBind/kallax-8      200000     7843 ns/op    4380 B/op    46 allocs/op
-BenchmarkSQLXRawBind/sqlx-8          100000    13056 ns/op    4572 B/op    55 allocs/op
-BenchmarkBoilRawBind/boil-8          200000    11519 ns/op    4638 B/op    55 allocs/op
-```
-
-<img src="http://i.imgur.com/SltE8UQ.png"/><img src="http://i.imgur.com/lzvM5jJ.png"/><img src="http://i.imgur.com/SS0zNd2.png"/>
-
-<img src="http://i.imgur.com/Kk0IM0J.png"/><img src="http://i.imgur.com/1IFtpdP.png"/><img src="http://i.imgur.com/t6Usecx.png"/>
-
-<img src="http://i.imgur.com/98DOzcr.png"/><img src="http://i.imgur.com/NSp5r4Q.png"/><img src="http://i.imgur.com/dEGlOgI.png"/>
-
-<img src="http://i.imgur.com/W0zhuGb.png"/><img src="http://i.imgur.com/YIvDuFv.png"/><img src="http://i.imgur.com/sKwuMaU.png"/>
-
-<img src="http://i.imgur.com/ZUMYVmw.png"/><img src="http://i.imgur.com/T61rH3K.png"/><img src="http://i.imgur.com/lDr0xhY.png"/>
-
-<img src="http://i.imgur.com/LWo10M9.png"/><img src="http://i.imgur.com/Td15owT.png"/><img src="http://i.imgur.com/45XXw4K.png"/>
-
-<img src="http://i.imgur.com/lpP8qds.png"/><img src="http://i.imgur.com/hLyH3jQ.png"/><img src="http://i.imgur.com/C2v10t3.png"/>
+<img style="margin-right:6px;" src="http://i.imgur.com/TglZGoI.png"/>
+<img style="margin-right:6px;" src="http://i.imgur.com/Ktm2ta4.png"/>
+<img style="margin-right:6px;" src="http://i.imgur.com/yv8kFPA.png"/>
+<img style="margin-right:6px;" src="http://i.imgur.com/890Zswe.png"/>
+<img style="margin-right:6px;" src="http://i.imgur.com/qMgoAFJ.png"/>
+<img style="margin-right:6px;" src="http://i.imgur.com/sDoNiCN.png"/>
+<img style="margin-right:6px;" src="http://i.imgur.com/EvUa4UT.png"/>
diff --git a/bdb/column.go b/bdb/column.go
index da56e48..b5c408e 100644
--- a/bdb/column.go
+++ b/bdb/column.go
@@ -3,7 +3,7 @@ package bdb
 import (
 	"strings"
 
-	"github.com/lbryio/sqlboiler/strmangle"
+	"github.com/vattle/sqlboiler/strmangle"
 )
 
 // Column holds information about a database column.
@@ -23,17 +23,6 @@ type Column struct {
 	// https://www.postgresql.org/docs/9.1/static/infoschema-element-types.html
 	ArrType *string
 	UDTName string
-
-	// MySQL only bits
-	// Used to get full type, ex:
-	// tinyint(1) instead of tinyint
-	// Used for "tinyint-as-bool" flag
-	FullDBType string
-
-	// MS SQL only bits
-	// Used to indicate that the value
-	// for this column is auto generated by database on insert (i.e. - timestamp (old) or rowversion (new))
-	AutoGenerated bool
 }
 
 // ColumnNames of the columns.
@@ -57,19 +46,6 @@ func ColumnDBTypes(cols []Column) map[string]string {
 	return types
 }
 
-// FilterColumnsByAuto generates the list of columns that have autogenerated values
-func FilterColumnsByAuto(auto bool, columns []Column) []Column {
-	var cols []Column
-
-	for _, c := range columns {
-		if (auto && c.AutoGenerated) || (!auto && !c.AutoGenerated) {
-			cols = append(cols, c)
-		}
-	}
-
-	return cols
-}
-
 // FilterColumnsByDefault generates the list of columns that have default values
 func FilterColumnsByDefault(defaults bool, columns []Column) []Column {
 	var cols []Column
diff --git a/bdb/drivers/mock.go b/bdb/drivers/mock.go
index d8a5aa0..abe85a4 100644
--- a/bdb/drivers/mock.go
+++ b/bdb/drivers/mock.go
@@ -1,8 +1,8 @@
 package drivers
 
 import (
-	"github.com/lbryio/sqlboiler/bdb"
-	"github.com/lbryio/sqlboiler/strmangle"
+	"github.com/vattle/sqlboiler/bdb"
+	"github.com/vattle/sqlboiler/strmangle"
 )
 
 // MockDriver is a mock implementation of the bdb driver Interface
@@ -58,14 +58,6 @@ func (m *MockDriver) Columns(schema, tableName string) ([]bdb.Column, error) {
 	}[tableName], nil
 }
 
-func (m *MockDriver) UniqueKeyInfo(schema, tableName string) ([]bdb.UniqueKey, error) {
-	return []bdb.UniqueKey{}, nil
-}
-
-func (m *MockDriver) AutoincrementInfo(schema, tableName string) (string, error) {
-	return "", nil
-}
-
 // ForeignKeyInfo returns a list of mock foreignkeys
 func (m *MockDriver) ForeignKeyInfo(schema, tableName string) ([]bdb.ForeignKey, error) {
 	return map[string][]bdb.ForeignKey{
@@ -126,9 +118,6 @@ func (m *MockDriver) PrimaryKeyInfo(schema, tableName string) (*bdb.PrimaryKey,
 // UseLastInsertID returns a database mock LastInsertID compatibility flag
 func (m *MockDriver) UseLastInsertID() bool { return false }
 
-// UseTopClause returns a database mock SQL TOP clause compatibility flag
-func (m *MockDriver) UseTopClause() bool { return false }
-
 // Open mimics a database open call and returns nil for no error
 func (m *MockDriver) Open() error { return nil }
 
diff --git a/bdb/drivers/mssql.go b/bdb/drivers/mssql.go
deleted file mode 100644
index 79f3010..0000000
--- a/bdb/drivers/mssql.go
+++ /dev/null
@@ -1,382 +0,0 @@
-package drivers
-
-import (
-	"database/sql"
-	"fmt"
-	"net/url"
-	"strings"
-
-	_ "github.com/denisenkom/go-mssqldb"
-	"github.com/lbryio/sqlboiler/bdb"
-	"github.com/pkg/errors"
-)
-
-// MSSQLDriver holds the database connection string and a handle
-// to the database connection.
-type MSSQLDriver struct {
-	connStr string
-	dbConn  *sql.DB
-}
-
-// NewMSSQLDriver takes the database connection details as parameters and
-// returns a pointer to a MSSQLDriver object. Note that it is required to
-// call MSSQLDriver.Open() and MSSQLDriver.Close() to open and close
-// the database connection once an object has been obtained.
-func NewMSSQLDriver(user, pass, dbname, host string, port int, sslmode string) *MSSQLDriver {
-	driver := MSSQLDriver{
-		connStr: MSSQLBuildQueryString(user, pass, dbname, host, port, sslmode),
-	}
-
-	return &driver
-}
-
-// MSSQLBuildQueryString builds a query string for MSSQL.
-func MSSQLBuildQueryString(user, pass, dbname, host string, port int, sslmode string) string {
-	query := url.Values{}
-	query.Add("database", dbname)
-	query.Add("encrypt", sslmode)
-
-	u := &url.URL{
-		Scheme: "sqlserver",
-		User:   url.UserPassword(user, pass),
-		Host:   fmt.Sprintf("%s:%d", host, port),
-		// Path:  instance, // if connecting to an instance instead of a port
-		RawQuery: query.Encode(),
-	}
-
-	return u.String()
-}
-
-// Open opens the database connection using the connection string
-func (m *MSSQLDriver) Open() error {
-	var err error
-	m.dbConn, err = sql.Open("mssql", m.connStr)
-	if err != nil {
-		return err
-	}
-
-	return nil
-}
-
-// Close closes the database connection
-func (m *MSSQLDriver) Close() {
-	m.dbConn.Close()
-}
-
-// UseLastInsertID returns false for mssql
-func (m *MSSQLDriver) UseLastInsertID() bool {
-	return false
-}
-
-// UseTopClause returns true to indicate MS SQL supports SQL TOP clause
-func (m *MSSQLDriver) UseTopClause() bool {
-	return true
-}
-
-// TableNames connects to the MS SQL database and
-// retrieves all table names from the information_schema where the
-// table schema is schema. It uses a whitelist and blacklist.
-func (m *MSSQLDriver) TableNames(schema string, whitelist, blacklist []string) ([]string, error) {
-	var names []string
-
-	query := `
-		SELECT table_name
-		FROM   information_schema.tables
-		WHERE  table_schema = ? AND table_type = 'BASE TABLE'`
-
-	args := []interface{}{schema}
-	if len(whitelist) > 0 {
-		query += fmt.Sprintf(" AND table_name IN (%s);", strings.Repeat(",?", len(whitelist))[1:])
-		for _, w := range whitelist {
-			args = append(args, w)
-		}
-	} else if len(blacklist) > 0 {
-		query += fmt.Sprintf(" AND table_name not IN (%s);", strings.Repeat(",?", len(blacklist))[1:])
-		for _, b := range blacklist {
-			args = append(args, b)
-		}
-	}
-
-	rows, err := m.dbConn.Query(query, args...)
-
-	if err != nil {
-		return nil, err
-	}
-
-	defer rows.Close()
-	for rows.Next() {
-		var name string
-		if err := rows.Scan(&name); err != nil {
-			return nil, err
-		}
-		names = append(names, name)
-	}
-
-	return names, nil
-}
-
-// Columns takes a table name and attempts to retrieve the table information
-// from the database information_schema.columns. It retrieves the column names
-// and column types and returns those as a []Column after TranslateColumnType()
-// converts the SQL types to Go types, for example: "varchar" to "string"
-func (m *MSSQLDriver) Columns(schema, tableName string) ([]bdb.Column, error) {
-	var columns []bdb.Column
-
-	rows, err := m.dbConn.Query(`
-	SELECT column_name,
-       CASE
-         WHEN character_maximum_length IS NULL THEN data_type
-         ELSE data_type + '(' + CAST(character_maximum_length AS VARCHAR) + ')'
-       END AS full_type,
-       data_type,
-	   column_default,
-       CASE
-         WHEN is_nullable = 'YES' THEN 1
-         ELSE 0
-       END AS is_nullable,
-       CASE
-         WHEN EXISTS (SELECT c.column_name
-                      FROM information_schema.table_constraints tc
-                        INNER JOIN information_schema.key_column_usage kcu
-                                ON tc.constraint_name = kcu.constraint_name
-                               AND tc.table_name = kcu.table_name
-                               AND tc.table_schema = kcu.table_schema
-                      WHERE c.column_name = kcu.column_name
-                      AND   tc.table_name = c.table_name
-                      AND   (tc.constraint_type = 'PRIMARY KEY' OR tc.constraint_type = 'UNIQUE')
-                      AND   (SELECT COUNT(*)
-                             FROM information_schema.key_column_usage
-                             WHERE table_schema = kcu.table_schema
-                             AND   table_name = tc.table_name
-                             AND   constraint_name = tc.constraint_name) = 1) THEN 1
-         ELSE 0
-       END AS is_unique,
-	   COLUMNPROPERTY(object_id($1 + '.' + $2), c.column_name, 'IsIdentity') as is_identity
-	FROM information_schema.columns c
-	WHERE table_schema = $1 AND table_name = $2;
-	`, schema, tableName)
-
-	if err != nil {
-		return nil, err
-	}
-	defer rows.Close()
-
-	for rows.Next() {
-		var colName, colType, colFullType string
-		var nullable, unique, identity, auto bool
-		var defaultValue *string
-		if err := rows.Scan(&colName, &colFullType, &colType, &defaultValue, &nullable, &unique, &identity); err != nil {
-			return nil, errors.Wrapf(err, "unable to scan for table %s", tableName)
-		}
-
-		auto = strings.EqualFold(colType, "timestamp") || strings.EqualFold(colType, "rowversion")
-
-		column := bdb.Column{
-			Name:          colName,
-			FullDBType:    colFullType,
-			DBType:        colType,
-			Nullable:      nullable,
-			Unique:        unique,
-			AutoGenerated: auto,
-		}
-
-		if defaultValue != nil && *defaultValue != "NULL" {
-			column.Default = *defaultValue
-		} else if identity || auto {
-			column.Default = "auto"
-		}
-		columns = append(columns, column)
-	}
-
-	return columns, nil
-}
-
-// PrimaryKeyInfo looks up the primary key for a table.
-func (m *MSSQLDriver) PrimaryKeyInfo(schema, tableName string) (*bdb.PrimaryKey, error) {
-	pkey := &bdb.PrimaryKey{}
-	var err error
-
-	query := `
-	SELECT constraint_name
-	FROM   information_schema.table_constraints
-	WHERE  table_name = ? AND constraint_type = 'PRIMARY KEY' AND table_schema = ?;`
-
-	row := m.dbConn.QueryRow(query, tableName, schema)
-	if err = row.Scan(&pkey.Name); err != nil {
-		if err == sql.ErrNoRows {
-			return nil, nil
-		}
-		return nil, err
-	}
-
-	queryColumns := `
-	SELECT column_name
-	FROM   information_schema.key_column_usage
-	WHERE  table_name = ? AND constraint_name = ? AND table_schema = ?;`
-
-	var rows *sql.Rows
-	if rows, err = m.dbConn.Query(queryColumns, tableName, pkey.Name, schema); err != nil {
-		return nil, err
-	}
-	defer rows.Close()
-
-	var columns []string
-	for rows.Next() {
-		var column string
-
-		err = rows.Scan(&column)
-		if err != nil {
-			return nil, err
-		}
-
-		columns = append(columns, column)
-	}
-
-	if err = rows.Err(); err != nil {
-		return nil, err
-	}
-
-	pkey.Columns = columns
-
-	return pkey, nil
-}
-
-func (m *MSSQLDriver) UniqueKeyInfo(schema, tableName string) ([]bdb.UniqueKey, error) {
-	return []bdb.UniqueKey{}, errors.New("not implemented")
-}
-
-func (m *MSSQLDriver) AutoincrementInfo(schema, tableName string) (string, error) {
-	return "", errors.New("not implemented")
-}
-
-// ForeignKeyInfo retrieves the foreign keys for a given table name.
-func (m *MSSQLDriver) ForeignKeyInfo(schema, tableName string) ([]bdb.ForeignKey, error) {
-	var fkeys []bdb.ForeignKey
-
-	query := `
-	SELECT ccu.constraint_name ,
-		ccu.table_name AS local_table ,
-		ccu.column_name AS local_column ,
-		kcu.table_name AS foreign_table ,
-		kcu.column_name AS foreign_column
-	FROM information_schema.constraint_column_usage ccu
-	INNER JOIN information_schema.referential_constraints rc ON ccu.constraint_name = rc.constraint_name
-	INNER JOIN information_schema.key_column_usage kcu ON kcu.constraint_name = rc.unique_constraint_name
-	WHERE ccu.table_schema = ?
-	  AND ccu.constraint_schema = ?
-	  AND ccu.table_name = ?
-	`
-
-	var rows *sql.Rows
-	var err error
-	if rows, err = m.dbConn.Query(query, schema, schema, tableName); err != nil {
-		return nil, err
-	}
-
-	for rows.Next() {
-		var fkey bdb.ForeignKey
-		var sourceTable string
-
-		fkey.Table = tableName
-		err = rows.Scan(&fkey.Name, &sourceTable, &fkey.Column, &fkey.ForeignTable, &fkey.ForeignColumn)
-		if err != nil {
-			return nil, err
-		}
-
-		fkeys = append(fkeys, fkey)
-	}
-
-	if err = rows.Err(); err != nil {
-		return nil, err
-	}
-
-	return fkeys, nil
-}
-
-// TranslateColumnType converts MS SQL database types to Go types, for example
-// "varchar" to "string" and "bigint" to "int64". It returns this parsed data
-// as a Column object.
-func (m *MSSQLDriver) TranslateColumnType(c bdb.Column) bdb.Column {
-	if c.Nullable {
-		switch c.DBType {
-		case "tinyint":
-			c.Type = "null.Int8"
-		case "smallint":
-			c.Type = "null.Int16"
-		case "mediumint":
-			c.Type = "null.Int32"
-		case "int":
-			c.Type = "null.Int"
-		case "bigint":
-			c.Type = "null.Int64"
-		case "real":
-			c.Type = "null.Float32"
-		case "float":
-			c.Type = "null.Float64"
-		case "boolean", "bool", "bit":
-			c.Type = "null.Bool"
-		case "date", "datetime", "datetime2", "smalldatetime", "time":
-			c.Type = "null.Time"
-		case "binary", "varbinary":
-			c.Type = "null.Bytes"
-		case "timestamp", "rowversion":
-			c.Type = "null.Bytes"
-		case "xml":
-			c.Type = "null.String"
-		case "uniqueidentifier":
-			c.Type = "null.String"
-			c.DBType = "uuid"
-		default:
-			c.Type = "null.String"
-		}
-	} else {
-		switch c.DBType {
-		case "tinyint":
-			c.Type = "int8"
-		case "smallint":
-			c.Type = "int16"
-		case "mediumint":
-			c.Type = "int32"
-		case "int":
-			c.Type = "int"
-		case "bigint":
-			c.Type = "int64"
-		case "real":
-			c.Type = "float32"
-		case "float":
-			c.Type = "float64"
-		case "boolean", "bool", "bit":
-			c.Type = "bool"
-		case "date", "datetime", "datetime2", "smalldatetime", "time":
-			c.Type = "time.Time"
-		case "binary", "varbinary":
-			c.Type = "[]byte"
-		case "timestamp", "rowversion":
-			c.Type = "[]byte"
-		case "xml":
-			c.Type = "string"
-		case "uniqueidentifier":
-			c.Type = "string"
-			c.DBType = "uuid"
-		default:
-			c.Type = "string"
-		}
-	}
-
-	return c
-}
-
-// RightQuote is the quoting character for the right side of the identifier
-func (m *MSSQLDriver) RightQuote() byte {
-	return ']'
-}
-
-// LeftQuote is the quoting character for the left side of the identifier
-func (m *MSSQLDriver) LeftQuote() byte {
-	return '['
-}
-
-// IndexPlaceholders returns true to indicate MS SQL supports indexed placeholders
-func (m *MSSQLDriver) IndexPlaceholders() bool {
-	return true
-}
diff --git a/bdb/drivers/mysql.go b/bdb/drivers/mysql.go
index 5523a6b..7f1cd4b 100644
--- a/bdb/drivers/mysql.go
+++ b/bdb/drivers/mysql.go
@@ -3,21 +3,14 @@ package drivers
 import (
 	"database/sql"
 	"fmt"
-	"sort"
 	"strconv"
 	"strings"
 
 	"github.com/go-sql-driver/mysql"
-	"github.com/lbryio/sqlboiler/bdb"
 	"github.com/pkg/errors"
+	"github.com/vattle/sqlboiler/bdb"
 )
 
-// TinyintAsBool is a global that is set from main.go if a user specifies
-// this flag when generating. This flag only applies to MySQL so we're using
-// a global instead, to avoid breaking the interface. If TinyintAsBool is true
-// then tinyint(1) will be mapped in your generated structs to bool as opposed to int8.
-var TinyintAsBool bool
-
 // MySQLDriver holds the database connection string and a handle
 // to the database connection.
 type MySQLDriver struct {
@@ -53,7 +46,6 @@ func MySQLBuildQueryString(user, pass, dbname, host string, port int, sslmode st
 	}
 	config.Addr += ":" + strconv.Itoa(port)
 	config.TLSConfig = sslmode
-	config.AllowNativePasswords = true
 
 	// MySQL is a bad, and by default reads date/datetime into a []byte
 	// instead of a time.Time. Tell it to stop being a bad.
@@ -83,11 +75,6 @@ func (m *MySQLDriver) UseLastInsertID() bool {
 	return true
 }
 
-// UseTopClause returns false to indicate MySQL doesn't support the SQL TOP clause
-func (m *MySQLDriver) UseTopClause() bool {
-	return false
-}
-
 // TableNames connects to the MySQL database and
 // retrieves all table names from the information_schema where the
 // table schema is public.
@@ -136,7 +123,6 @@ func (m *MySQLDriver) Columns(schema, tableName string) ([]bdb.Column, error) {
 	rows, err := m.dbConn.Query(`
 	select
 	c.column_name,
-	c.column_type,
 	if(c.data_type = 'enum', c.column_type, c.data_type),
 	if(extra = 'auto_increment','auto_increment', c.column_default),
 	c.is_nullable = 'YES',
@@ -146,8 +132,7 @@ func (m *MySQLDriver) Columns(schema, tableName string) ([]bdb.Column, error) {
 			inner join information_schema.key_column_usage kcu
 				on tc.constraint_name = kcu.constraint_name and tc.table_name = kcu.table_name and tc.table_schema = kcu.table_schema
 			where c.column_name = kcu.column_name and tc.table_name = c.table_name and
-				(tc.constraint_type = 'PRIMARY KEY' or tc.constraint_type = 'UNIQUE') and
-				(select count(*) from information_schema.key_column_usage where table_schema = kcu.table_schema and table_name = tc.table_name and constraint_name = tc.constraint_name) = 1
+				(tc.constraint_type = 'PRIMARY KEY' or tc.constraint_type = 'UNIQUE')
 		) as is_unique
 	from information_schema.columns as c
 	where table_name = ? and table_schema = ?;
@@ -159,21 +144,19 @@ func (m *MySQLDriver) Columns(schema, tableName string) ([]bdb.Column, error) {
 	defer rows.Close()
 
 	for rows.Next() {
-		var colName, colType, colFullType string
+		var colName, colType string
 		var nullable, unique bool
 		var defaultValue *string
-		if err := rows.Scan(&colName, &colFullType, &colType, &defaultValue, &nullable, &unique); err != nil {
+		if err := rows.Scan(&colName, &colType, &defaultValue, &nullable, &unique); err != nil {
 			return nil, errors.Wrapf(err, "unable to scan for table %s", tableName)
 		}
 
 		column := bdb.Column{
-			Name:       colName,
-			FullDBType: colFullType, // example: tinyint(1) instead of tinyint
-			DBType:     colType,
-			Nullable:   nullable,
-			Unique:     unique,
+			Name:     colName,
+			DBType:   colType,
+			Nullable: nullable,
+			Unique:   unique,
 		}
-
 		if defaultValue != nil && *defaultValue != "NULL" {
 			column.Default = *defaultValue
 		}
@@ -234,79 +217,6 @@ func (m *MySQLDriver) PrimaryKeyInfo(schema, tableName string) (*bdb.PrimaryKey,
 	return pkey, nil
 }
 
-// UniqueKeyInfo retrieves the unique keys for a given table name.
-func (m *MySQLDriver) UniqueKeyInfo(schema, tableName string) ([]bdb.UniqueKey, error) {
-	var ukeys []bdb.UniqueKey
-
-	query := `
-	select tc.table_name, tc.constraint_name, GROUP_CONCAT(kcu.column_name)
-	from information_schema.table_constraints tc
-	left join information_schema.key_column_usage kcu on tc.constraint_name = kcu.constraint_name and tc.table_name = kcu.table_name and tc.table_schema = kcu.table_schema
-	where tc.table_schema = ? and tc.table_name = ? and tc.constraint_type = "UNIQUE"
-	group by tc.table_name, tc.constraint_name
-	`
-
-	var rows *sql.Rows
-	var err error
-	if rows, err = m.dbConn.Query(query, schema, tableName); err != nil {
-		return nil, err
-	}
-
-	for rows.Next() {
-		var ukey bdb.UniqueKey
-		var columns string
-
-		//ukey.Table = tableName
-		err = rows.Scan(&ukey.Table, &ukey.Name, &columns)
-		if err != nil {
-			return nil, err
-		}
-
-		ukey.Columns = strings.Split(columns, ",")
-		sort.Strings(ukey.Columns)
-
-		ukeys = append(ukeys, ukey)
-	}
-
-	if err = rows.Err(); err != nil {
-		return nil, err
-	}
-
-	return ukeys, nil
-}
-
-// AutoincrementInfo retrieves the autoincrement column for a given table name, if one exists.
-func (m *MySQLDriver) AutoincrementInfo(schema, tableName string) (string, error) {
-	query := `
-	select column_name
-	from information_schema.columns
-	where table_schema = ? and table_name = ? and extra like "%auto_increment%"
-	`
-
-	var rows *sql.Rows
-	var err error
-	if rows, err = m.dbConn.Query(query, schema, tableName); err != nil {
-		return "", err
-	}
-
-	for rows.Next() {
-		var column string
-
-		err = rows.Scan(&column)
-		if err != nil {
-			return "", err
-		}
-
-		return column, nil
-	}
-
-	if err = rows.Err(); err != nil {
-		return "", err
-	}
-
-	return "", nil
-}
-
 // ForeignKeyInfo retrieves the foreign keys for a given table name.
 func (m *MySQLDriver) ForeignKeyInfo(schema, tableName string) ([]bdb.ForeignKey, error) {
 	var fkeys []bdb.ForeignKey
@@ -347,42 +257,18 @@ func (m *MySQLDriver) ForeignKeyInfo(schema, tableName string) ([]bdb.ForeignKey
 // "varchar" to "string" and "bigint" to "int64". It returns this parsed data
 // as a Column object.
 func (m *MySQLDriver) TranslateColumnType(c bdb.Column) bdb.Column {
-	unsigned := strings.Contains(c.FullDBType, "unsigned")
 	if c.Nullable {
 		switch c.DBType {
 		case "tinyint":
-			// map tinyint(1) to bool if TinyintAsBool is true
-			if TinyintAsBool && c.FullDBType == "tinyint(1)" {
-				c.Type = "null.Bool"
-			} else if unsigned {
-				c.Type = "null.Uint8"
-			} else {
-				c.Type = "null.Int8"
-			}
+			c.Type = "null.Int8"
 		case "smallint":
-			if unsigned {
-				c.Type = "null.Uint16"
-			} else {
-				c.Type = "null.Int16"
-			}
+			c.Type = "null.Int16"
 		case "mediumint":
-			if unsigned {
-				c.Type = "null.Uint32"
-			} else {
-				c.Type = "null.Int32"
-			}
+			c.Type = "null.Int32"
 		case "int", "integer":
-			if unsigned {
-				c.Type = "null.Uint"
-			} else {
-				c.Type = "null.Int"
-			}
+			c.Type = "null.Int"
 		case "bigint":
-			if unsigned {
-				c.Type = "null.Uint64"
-			} else {
-				c.Type = "null.Int64"
-			}
+			c.Type = "null.Int64"
 		case "float":
 			c.Type = "null.Float32"
 		case "double", "double precision", "real":
@@ -401,38 +287,15 @@ func (m *MySQLDriver) TranslateColumnType(c bdb.Column) bdb.Column {
 	} else {
 		switch c.DBType {
 		case "tinyint":
-			// map tinyint(1) to bool if TinyintAsBool is true
-			if TinyintAsBool && c.FullDBType == "tinyint(1)" {
-				c.Type = "bool"
-			} else if unsigned {
-				c.Type = "uint8"
-			} else {
-				c.Type = "int8"
-			}
+			c.Type = "int8"
 		case "smallint":
-			if unsigned {
-				c.Type = "uint16"
-			} else {
-				c.Type = "int16"
-			}
+			c.Type = "int16"
 		case "mediumint":
-			if unsigned {
-				c.Type = "uint32"
-			} else {
-				c.Type = "int32"
-			}
+			c.Type = "int32"
 		case "int", "integer":
-			if unsigned {
-				c.Type = "uint"
-			} else {
-				c.Type = "int"
-			}
+			c.Type = "int"
 		case "bigint":
-			if unsigned {
-				c.Type = "uint64"
-			} else {
-				c.Type = "int64"
-			}
+			c.Type = "int64"
 		case "float":
 			c.Type = "float32"
 		case "double", "double precision", "real":
diff --git a/bdb/drivers/postgres.go b/bdb/drivers/postgres.go
index 9af7edd..d14fb1b 100644
--- a/bdb/drivers/postgres.go
+++ b/bdb/drivers/postgres.go
@@ -3,15 +3,14 @@ package drivers
 import (
 	"database/sql"
 	"fmt"
-	"os"
 	"strings"
 
 	// Side-effect import sql driver
 
-	"github.com/lbryio/sqlboiler/bdb"
-	"github.com/lbryio/sqlboiler/strmangle"
 	_ "github.com/lib/pq"
 	"github.com/pkg/errors"
+	"github.com/vattle/sqlboiler/bdb"
+	"github.com/vattle/sqlboiler/strmangle"
 )
 
 // PostgresDriver holds the database connection string and a handle
@@ -79,11 +78,6 @@ func (p *PostgresDriver) UseLastInsertID() bool {
 	return false
 }
 
-// UseTopClause returns false to indicate PSQL doesn't support the SQL TOP clause
-func (m *PostgresDriver) UseTopClause() bool {
-	return false
-}
-
 // TableNames connects to the postgres database and
 // retrieves all table names from the information_schema where the
 // table schema is schema. It uses a whitelist and blacklist.
@@ -133,7 +127,7 @@ func (p *PostgresDriver) Columns(schema, tableName string) ([]bdb.Column, error)
 		select
 		c.column_name,
 		(
-			case when pgt.typtype = 'e'
+			case when c.data_type = 'USER-DEFINED' and c.udt_name <> 'hstore'
 			then
 			(
 				select 'enum.' || c.udt_name || '(''' || string_agg(labels.label, ''',''') || ''')'
@@ -161,28 +155,25 @@ func (p *PostgresDriver) Columns(schema, tableName string) ([]bdb.Column, error)
 		c.is_nullable = 'YES' as is_nullable,
 		(select exists(
 			select 1
-			from information_schema.table_constraints tc
-			inner join information_schema.constraint_column_usage as ccu on tc.constraint_name = ccu.constraint_name
-			where tc.table_schema = $1 and tc.constraint_type = 'UNIQUE' and ccu.constraint_schema = $1 and ccu.table_name = c.table_name and ccu.column_name = c.column_name and
-				(select count(*) from information_schema.constraint_column_usage where constraint_schema = $1 and constraint_name = tc.constraint_name) = 1
-		)) OR
-		(select exists(
+				from information_schema.constraint_column_usage as ccu
+			inner join information_schema.table_constraints tc on ccu.constraint_name = tc.constraint_name
+			where ccu.table_name = c.table_name and ccu.column_name = c.column_name and tc.constraint_type = 'UNIQUE'
+			)) OR (select exists(
 			select 1
-			from pg_indexes pgix
-			inner join pg_class pgc on pgix.indexname = pgc.relname and pgc.relkind = 'i' and pgc.relnatts = 1
-			inner join pg_index pgi on pgi.indexrelid = pgc.oid
-			inner join pg_attribute pga on pga.attrelid = pgi.indrelid and pga.attnum = ANY(pgi.indkey)
+			from
+				pg_indexes pgix
+				inner join pg_class pgc on pgix.indexname = pgc.relname and pgc.relkind = 'i'
+				inner join pg_index pgi on pgi.indexrelid = pgc.oid
+				inner join pg_attribute pga on pga.attrelid = pgi.indrelid and pga.attnum = ANY(pgi.indkey)
 			where
 				pgix.schemaname = $1 and pgix.tablename = c.table_name and pga.attname = c.column_name and pgi.indisunique = true
 		)) as is_unique
 
 		from information_schema.columns as c
-		inner join pg_namespace as pgn on pgn.nspname = c.udt_schema
-		left join pg_type pgt on c.data_type = 'USER-DEFINED' and pgn.oid = pgt.typnamespace and c.udt_name = pgt.typname
 		left join information_schema.element_types e
 			on ((c.table_catalog, c.table_schema, c.table_name, 'TABLE', c.dtd_identifier)
 			= (e.object_catalog, e.object_schema, e.object_name, e.object_type, e.collection_type_identifier))
-		where c.table_name = $2 and c.table_schema = $1;
+		where c.table_name=$2 and c.table_schema = $1;
 	`, schema, tableName)
 
 	if err != nil {
@@ -266,32 +257,21 @@ func (p *PostgresDriver) PrimaryKeyInfo(schema, tableName string) (*bdb.PrimaryK
 	return pkey, nil
 }
 
-func (p *PostgresDriver) UniqueKeyInfo(schema, tableName string) ([]bdb.UniqueKey, error) {
-	return []bdb.UniqueKey{}, errors.New("not implemented")
-}
-
-func (p *PostgresDriver) AutoincrementInfo(schema, tableName string) (string, error) {
-	return "", errors.New("not implemented")
-}
-
 // ForeignKeyInfo retrieves the foreign keys for a given table name.
 func (p *PostgresDriver) ForeignKeyInfo(schema, tableName string) ([]bdb.ForeignKey, error) {
 	var fkeys []bdb.ForeignKey
 
 	query := `
 	select
-		pgcon.conname,
-		pgc.relname as source_table,
-		pgasrc.attname as source_column,
-		dstlookupname.relname as dest_table,
-		pgadst.attname as dest_column
-	from pg_namespace pgn
-		inner join pg_class pgc on pgn.oid = pgc.relnamespace and pgc.relkind = 'r'
-		inner join pg_constraint pgcon on pgn.oid = pgcon.connamespace and pgc.oid = pgcon.conrelid
-		inner join pg_class dstlookupname on pgcon.confrelid = dstlookupname.oid
-		inner join pg_attribute pgasrc on pgc.oid = pgasrc.attrelid and pgasrc.attnum = ANY(pgcon.conkey)
-		inner join pg_attribute pgadst on pgcon.confrelid = pgadst.attrelid and pgadst.attnum = ANY(pgcon.confkey)
-	where pgn.nspname = $2 and pgc.relname = $1 and pgcon.contype = 'f'`
+		tc.constraint_name,
+		kcu.table_name as source_table,
+		kcu.column_name as source_column,
+		ccu.table_name as dest_table,
+		ccu.column_name as dest_column
+	from information_schema.table_constraints as tc
+		inner join information_schema.key_column_usage as kcu ON tc.constraint_name = kcu.constraint_name and tc.constraint_schema = kcu.constraint_schema
+		inner join information_schema.constraint_column_usage as ccu ON tc.constraint_name = ccu.constraint_name and tc.constraint_schema = ccu.constraint_schema
+	where tc.table_name = $1 and tc.constraint_type = 'FOREIGN KEY' and tc.table_schema = $2;`
 
 	var rows *sql.Rows
 	var err error
@@ -360,7 +340,7 @@ func (p *PostgresDriver) TranslateColumnType(c bdb.Column) bdb.Column {
 				c.DBType = "hstore"
 			} else {
 				c.Type = "string"
-				fmt.Fprintln(os.Stderr, "Warning: Incompatible data type detected: %s\n", c.UDTName)
+				fmt.Printf("Warning: Incompatible data type detected: %s\n", c.UDTName)
 			}
 		default:
 			c.Type = "null.String"
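
Note: in the driver change above, the foreign-key lookup moves from the pg_catalog tables to the information_schema views. The standalone sketch below (not part of the diff) shows how such a query can be run with database/sql and lib/pq; the listForeignKeys function, the fkey struct, the DSN, and the "videos" table are illustrative assumptions, not part of the driver.

package main

import (
	"database/sql"
	"fmt"
	"log"

	_ "github.com/lib/pq" // postgres driver, already used by the generated test mains in this diff
)

// fkey is a hypothetical holder for one foreign-key row; the real driver
// scans into bdb.ForeignKey instead.
type fkey struct {
	Name, SourceTable, SourceColumn, DestTable, DestColumn string
}

// listForeignKeys runs the same information_schema join as the query shown
// in ForeignKeyInfo above and collects the rows.
func listForeignKeys(db *sql.DB, schema, table string) ([]fkey, error) {
	const q = `
	select tc.constraint_name, kcu.table_name, kcu.column_name, ccu.table_name, ccu.column_name
	from information_schema.table_constraints as tc
		inner join information_schema.key_column_usage as kcu
			on tc.constraint_name = kcu.constraint_name and tc.constraint_schema = kcu.constraint_schema
		inner join information_schema.constraint_column_usage as ccu
			on tc.constraint_name = ccu.constraint_name and tc.constraint_schema = ccu.constraint_schema
	where tc.table_name = $1 and tc.constraint_type = 'FOREIGN KEY' and tc.table_schema = $2`

	rows, err := db.Query(q, table, schema)
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	var out []fkey
	for rows.Next() {
		var f fkey
		if err := rows.Scan(&f.Name, &f.SourceTable, &f.SourceColumn, &f.DestTable, &f.DestColumn); err != nil {
			return nil, err
		}
		out = append(out, f)
	}
	return out, rows.Err()
}

func main() {
	// The DSN and the table name are assumptions for the example only.
	db, err := sql.Open("postgres", "dbname=sqlboiler sslmode=disable")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	fks, err := listForeignKeys(db, "public", "videos")
	if err != nil {
		log.Fatal(err)
	}
	for _, fk := range fks {
		fmt.Printf("%s: %s.%s -> %s.%s\n", fk.Name, fk.SourceTable, fk.SourceColumn, fk.DestTable, fk.DestColumn)
	}
}
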
diff --git a/bdb/interface.go b/bdb/interface.go
index 7f4baa5..2d90a1d 100644
--- a/bdb/interface.go
+++ b/bdb/interface.go
@@ -9,8 +9,6 @@ type Interface interface {
 	TableNames(schema string, whitelist, blacklist []string) ([]string, error)
 	Columns(schema, tableName string) ([]Column, error)
 	PrimaryKeyInfo(schema, tableName string) (*PrimaryKey, error)
-	UniqueKeyInfo(schema, tableName string) ([]UniqueKey, error)
-	AutoincrementInfo(schema, tableName string) (string, error)
 	ForeignKeyInfo(schema, tableName string) ([]ForeignKey, error)
 
 	// TranslateColumnType takes a Database column type and returns a go column type.
@@ -20,10 +18,6 @@ type Interface interface {
 	// the sql.Exec result's LastInsertId
 	UseLastInsertID() bool
 
-	// UseTopClause should return true if the Database is capable of using
-	// the SQL TOP clause
-	UseTopClause() bool
-
 	// Open the database connection
 	Open() error
 	// Close the database connection
@@ -65,18 +59,10 @@ func Tables(db Interface, schema string, whitelist, blacklist []string) ([]Table
 			return nil, errors.Wrapf(err, "unable to fetch table pkey info (%s)", name)
 		}
 
-		if t.UKeys, err = db.UniqueKeyInfo(schema, name); err != nil {
-			return nil, errors.Wrapf(err, "unable to fetch table ukey info (%s)", name)
-		}
-
 		if t.FKeys, err = db.ForeignKeyInfo(schema, name); err != nil {
 			return nil, errors.Wrapf(err, "unable to fetch table fkey info (%s)", name)
 		}
 
-		if t.AutoIncrementColumn, err = db.AutoincrementInfo(schema, name); err != nil {
-			return nil, errors.Wrapf(err, "unable to fetch table autoincrement info (%s)", name)
-		}
-
 		setIsJoinTable(&t)
 
 		tables = append(tables, t)
diff --git a/bdb/interface_test.go b/bdb/interface_test.go
index e5d28fb..effeecf 100644
--- a/bdb/interface_test.go
+++ b/bdb/interface_test.go
@@ -3,14 +3,13 @@ package bdb
 import (
 	"testing"
 
-	"github.com/lbryio/sqlboiler/strmangle"
+	"github.com/vattle/sqlboiler/strmangle"
 )
 
 type testMockDriver struct{}
 
 func (m testMockDriver) TranslateColumnType(c Column) Column { return c }
 func (m testMockDriver) UseLastInsertID() bool               { return false }
-func (m testMockDriver) UseTopClause() bool                  { return false }
 func (m testMockDriver) Open() error                         { return nil }
 func (m testMockDriver) Close()                              {}
 
diff --git a/bdb/keys.go b/bdb/keys.go
index 8007843..909ada2 100644
--- a/bdb/keys.go
+++ b/bdb/keys.go
@@ -8,13 +8,6 @@ type PrimaryKey struct {
 	Columns []string
 }
 
-// UniqueKey represents a unique key constraint in a database
-type UniqueKey struct {
-	Table   string
-	Name    string
-	Columns []string
-}
-
 // ForeignKey represents a foreign key constraint in a database
 type ForeignKey struct {
 	Table    string
diff --git a/bdb/table.go b/bdb/table.go
index 13355aa..62dcd32 100644
--- a/bdb/table.go
+++ b/bdb/table.go
@@ -8,12 +8,9 @@ type Table struct {
 	// For dbs with real schemas, like Postgres.
 	// Example value: "schema_name"."table_name"
 	SchemaName string
-
-	Columns             []Column
-	AutoIncrementColumn string
+	Columns    []Column
 
 	PKey  *PrimaryKey
-	UKeys []UniqueKey
 	FKeys []ForeignKey
 
 	IsJoinTable bool
diff --git a/boil/db.go b/boil/db.go
index f3153bc..84bc7d3 100644
--- a/boil/db.go
+++ b/boil/db.go
@@ -19,10 +19,6 @@ type Transactor interface {
 
 // Beginner begins transactions.
 type Beginner interface {
-	Begin() (Transactor, error)
-}
-
-type SQLBeginner interface {
 	Begin() (*sql.Tx, error)
 }
 
@@ -30,11 +26,7 @@ type SQLBeginner interface {
 func Begin() (Transactor, error) {
 	creator, ok := currentDB.(Beginner)
 	if !ok {
-		creator2, ok2 := currentDB.(SQLBeginner)
-		if !ok2 {
-			panic("database does not support transactions")
-		}
-		return creator2.Begin()
+		panic("database does not support transactions")
 	}
 
 	return creator.Begin()
diff --git a/boil/errors.go b/boil/errors.go
new file mode 100644
index 0000000..4d02169
--- /dev/null
+++ b/boil/errors.go
@@ -0,0 +1,23 @@
+package boil
+
+type boilErr struct {
+	error
+}
+
+// WrapErr wraps err in a boilErr
+func WrapErr(err error) error {
+	return boilErr{
+		error: err,
+	}
+}
+
+// Error returns the underlying error string
+func (e boilErr) Error() string {
+	return e.error.Error()
+}
+
+// IsBoilErr checks if err is a boilErr
+func IsBoilErr(err error) bool {
+	_, ok := err.(boilErr)
+	return ok
+}
diff --git a/boil/errors_test.go b/boil/errors_test.go
new file mode 100644
index 0000000..2b2f1a0
--- /dev/null
+++ b/boil/errors_test.go
@@ -0,0 +1,24 @@
+package boil
+
+import (
+	"errors"
+	"testing"
+)
+
+func TestErrors(t *testing.T) {
+	t.Parallel()
+
+	err := errors.New("test error")
+	if IsBoilErr(err) == true {
+		t.Errorf("Expected false")
+	}
+
+	err = WrapErr(errors.New("test error"))
+	if err.Error() != "test error" {
+		t.Errorf(`Expected "test error", got %v`, err.Error())
+	}
+
+	if IsBoilErr(err) != true {
+		t.Errorf("Expected true")
+	}
+}
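
Note: the two new files above add and test a small error wrapper. A minimal sketch of how a caller outside the boil package might use it (the import path is the one used throughout this diff; the example itself is not part of the change):

package main

import (
	"errors"
	"fmt"

	"github.com/vattle/sqlboiler/boil"
)

func main() {
	plain := errors.New("query failed")
	wrapped := boil.WrapErr(plain)

	fmt.Println(boil.IsBoilErr(plain))   // false: a plain error is not wrapped
	fmt.Println(boil.IsBoilErr(wrapped)) // true: WrapErr marked it
	fmt.Println(wrapped.Error())         // "query failed": the message passes through unchanged
}
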
diff --git a/boil/global.go b/boil/global.go
index 03c574b..f357d61 100644
--- a/boil/global.go
+++ b/boil/global.go
@@ -1,7 +1,6 @@
 package boil
 
 import (
-	"io"
 	"os"
 	"time"
 )
@@ -21,7 +20,7 @@ var (
 var DebugMode = false
 
 // DebugWriter is where the debug output will be sent if DebugMode is true
-var DebugWriter io.Writer = os.Stdout
+var DebugWriter = os.Stdout
 
 // SetDB initializes the database handle for all template db interactions
 func SetDB(db Executor) {
diff --git a/boilingcore/imports.go b/boilingcore/imports.go
deleted file mode 100644
index e20480b..0000000
--- a/boilingcore/imports.go
+++ /dev/null
@@ -1,466 +0,0 @@
-package boilingcore
-
-import (
-	"bytes"
-	"fmt"
-	"sort"
-	"strings"
-
-	"github.com/lbryio/sqlboiler/bdb"
-)
-
-// imports defines the optional standard imports and
-// thirdParty imports (from github for example)
-type imports struct {
-	standard   importList
-	thirdParty importList
-}
-
-// importList is a list of import names
-type importList []string
-
-func (i importList) Len() int {
-	return len(i)
-}
-
-func (i importList) Swap(k, j int) {
-	i[k], i[j] = i[j], i[k]
-}
-
-func (i importList) Less(k, j int) bool {
-	res := strings.Compare(strings.TrimLeft(i[k], "_ "), strings.TrimLeft(i[j], "_ "))
-	if res <= 0 {
-		return true
-	}
-
-	return false
-}
-
-func combineImports(a, b imports) imports {
-	var c imports
-
-	c.standard = removeDuplicates(combineStringSlices(a.standard, b.standard))
-	c.thirdParty = removeDuplicates(combineStringSlices(a.thirdParty, b.thirdParty))
-
-	sort.Sort(c.standard)
-	sort.Sort(c.thirdParty)
-
-	return c
-}
-
-func combineTypeImports(a imports, b map[string]imports, columns []bdb.Column) imports {
-	tmpImp := imports{
-		standard:   make(importList, len(a.standard)),
-		thirdParty: make(importList, len(a.thirdParty)),
-	}
-
-	copy(tmpImp.standard, a.standard)
-	copy(tmpImp.thirdParty, a.thirdParty)
-
-	for _, col := range columns {
-		for key, imp := range b {
-			if col.Type == key {
-				tmpImp.standard = append(tmpImp.standard, imp.standard...)
-				tmpImp.thirdParty = append(tmpImp.thirdParty, imp.thirdParty...)
-			}
-		}
-	}
-
-	tmpImp.standard = removeDuplicates(tmpImp.standard)
-	tmpImp.thirdParty = removeDuplicates(tmpImp.thirdParty)
-
-	sort.Sort(tmpImp.standard)
-	sort.Sort(tmpImp.thirdParty)
-
-	return tmpImp
-}
-
-func buildImportString(imps imports) []byte {
-	stdlen, thirdlen := len(imps.standard), len(imps.thirdParty)
-	if stdlen+thirdlen < 1 {
-		return []byte{}
-	}
-
-	if stdlen+thirdlen == 1 {
-		var imp string
-		if stdlen == 1 {
-			imp = imps.standard[0]
-		} else {
-			imp = imps.thirdParty[0]
-		}
-		return []byte(fmt.Sprintf("import %s", imp))
-	}
-
-	buf := &bytes.Buffer{}
-	buf.WriteString("import (")
-	for _, std := range imps.standard {
-		fmt.Fprintf(buf, "\n\t%s", std)
-	}
-	if stdlen != 0 && thirdlen != 0 {
-		buf.WriteString("\n")
-	}
-	for _, third := range imps.thirdParty {
-		fmt.Fprintf(buf, "\n\t%s", third)
-	}
-	buf.WriteString("\n)\n")
-
-	return buf.Bytes()
-}
-
-func combineStringSlices(a, b []string) []string {
-	c := make([]string, len(a)+len(b))
-	if len(a) > 0 {
-		copy(c, a)
-	}
-	if len(b) > 0 {
-		copy(c[len(a):], b)
-	}
-
-	return c
-}
-
-func removeDuplicates(dedup []string) []string {
-	if len(dedup) <= 1 {
-		return dedup
-	}
-
-	for i := 0; i < len(dedup)-1; i++ {
-		for j := i + 1; j < len(dedup); j++ {
-			if dedup[i] != dedup[j] {
-				continue
-			}
-
-			if j != len(dedup)-1 {
-				dedup[j] = dedup[len(dedup)-1]
-				j--
-			}
-			dedup = dedup[:len(dedup)-1]
-		}
-	}
-
-	return dedup
-}
-
-type mapImports map[string]imports
-
-type importer struct {
-	Standard     imports
-	TestStandard imports
-
-	Singleton     mapImports
-	TestSingleton mapImports
-
-	TestMain mapImports
-
-	BasedOnType mapImports
-}
-
-// newImporter returns an importer struct with default import values
-func newImporter() importer {
-	var imp importer
-
-	imp.Standard = imports{
-		standard: importList{
-			`"bytes"`,
-			`"database/sql"`,
-			`"fmt"`,
-			`"reflect"`,
-			`"strings"`,
-			`"sync"`,
-			`"time"`,
-		},
-		thirdParty: importList{
-			`"github.com/lbryio/lbry.go/v2/extras/errors"`,
-			`"github.com/lbryio/lbry.go/v2/extras/null"`,
-			`"github.com/lbryio/sqlboiler/boil"`,
-			`"github.com/lbryio/sqlboiler/queries"`,
-			`"github.com/lbryio/sqlboiler/queries/qm"`,
-			`"github.com/lbryio/sqlboiler/strmangle"`,
-		},
-	}
-
-	imp.Singleton = mapImports{
-		"boil_queries": imports{
-			standard: importList{
-				`"fmt"`,
-				`"strings"`,
-			},
-			thirdParty: importList{
-				`"github.com/lbryio/lbry.go/v2/extras/errors"`,
-				`"github.com/lbryio/sqlboiler/boil"`,
-				`"github.com/lbryio/sqlboiler/queries"`,
-				`"github.com/lbryio/sqlboiler/queries/qm"`,
-				`"github.com/lbryio/sqlboiler/strmangle"`,
-			},
-		},
-		"boil_types": {
-			thirdParty: importList{
-				`"github.com/lbryio/lbry.go/v2/extras/errors"`,
-				`"github.com/lbryio/sqlboiler/strmangle"`,
-			},
-		},
-	}
-
-	imp.TestStandard = imports{
-		standard: importList{
-			`"bytes"`,
-			`"reflect"`,
-			`"testing"`,
-		},
-		thirdParty: importList{
-			`"github.com/lbryio/sqlboiler/boil"`,
-			`"github.com/lbryio/sqlboiler/randomize"`,
-			`"github.com/lbryio/sqlboiler/strmangle"`,
-		},
-	}
-
-	imp.TestSingleton = mapImports{
-		"boil_main_test": {
-			standard: importList{
-				`"database/sql"`,
-				`"flag"`,
-				`"fmt"`,
-				`"math/rand"`,
-				`"os"`,
-				`"path/filepath"`,
-				`"testing"`,
-				`"time"`,
-			},
-			thirdParty: importList{
-				`"github.com/kat-co/vala"`,
-				`"github.com/lbryio/lbry.go/v2/extras/errors"`,
-				`"github.com/lbryio/sqlboiler/boil"`,
-				`"github.com/spf13/viper"`,
-			},
-		},
-		"boil_queries_test": {
-			standard: importList{
-				`"bytes"`,
-				`"fmt"`,
-				`"io"`,
-				`"io/ioutil"`,
-				`"math/rand"`,
-				`"regexp"`,
-			},
-			thirdParty: importList{
-				`"github.com/lbryio/sqlboiler/boil"`,
-			},
-		},
-		"boil_suites_test": {
-			standard: importList{
-				`"testing"`,
-			},
-		},
-	}
-
-	imp.TestMain = mapImports{
-		"postgres": {
-			standard: importList{
-				`"bytes"`,
-				`"database/sql"`,
-				`"fmt"`,
-				`"io"`,
-				`"io/ioutil"`,
-				`"os"`,
-				`"os/exec"`,
-				`"strings"`,
-			},
-			thirdParty: importList{
-				`"github.com/lbryio/lbry.go/v2/extras/errors"`,
-				`"github.com/lbryio/sqlboiler/bdb/drivers"`,
-				`"github.com/lbryio/sqlboiler/randomize"`,
-				`_ "github.com/lib/pq"`,
-				`"github.com/spf13/viper"`,
-			},
-		},
-		"mysql": {
-			standard: importList{
-				`"bytes"`,
-				`"database/sql"`,
-				`"fmt"`,
-				`"io"`,
-				`"io/ioutil"`,
-				`"os"`,
-				`"os/exec"`,
-				`"strings"`,
-			},
-			thirdParty: importList{
-				`_ "github.com/go-sql-driver/mysql"`,
-				`"github.com/lbryio/lbry.go/v2/extras/errors"`,
-				`"github.com/lbryio/sqlboiler/bdb/drivers"`,
-				`"github.com/lbryio/sqlboiler/randomize"`,
-				`"github.com/spf13/viper"`,
-			},
-		},
-		"mssql": {
-			standard: importList{
-				`"bytes"`,
-				`"database/sql"`,
-				`"fmt"`,
-				`"os"`,
-				`"os/exec"`,
-				`"strings"`,
-			},
-			thirdParty: importList{
-				`_ "github.com/denisenkom/go-mssqldb"`,
-				`"github.com/lbryio/lbry.go/v2/extras/errors"`,
-				`"github.com/lbryio/sqlboiler/bdb/drivers"`,
-				`"github.com/lbryio/sqlboiler/randomize"`,
-				`"github.com/spf13/viper"`,
-			},
-		},
-	}
-
-	// basedOnType imports are only included in the template output if the
-	// database requires one of the following special types. Check
-	// TranslateColumnType to see the type assignments.
-	imp.BasedOnType = mapImports{
-		"null.Float32": {
-			thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
-		},
-		"null.Float64": {
-			thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
-		},
-		"null.Int": {
-			thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
-		},
-		"null.Int8": {
-			thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
-		},
-		"null.Int16": {
-			thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
-		},
-		"null.Int32": {
-			thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
-		},
-		"null.Int64": {
-			thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
-		},
-		"null.Uint": {
-			thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
-		},
-		"null.Uint8": {
-			thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
-		},
-		"null.Uint16": {
-			thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
-		},
-		"null.Uint32": {
-			thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
-		},
-		"null.Uint64": {
-			thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
-		},
-		"null.String": {
-			thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
-		},
-		"null.Bool": {
-			thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
-		},
-		"null.Time": {
-			thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
-		},
-		"null.JSON": {
-			thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
-		},
-		"null.Bytes": {
-			thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
-		},
-		"time.Time": {
-			standard: importList{`"time"`},
-		},
-		"types.JSON": {
-			thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
-		},
-		"types.BytesArray": {
-			thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
-		},
-		"types.Int64Array": {
-			thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
-		},
-		"types.Float64Array": {
-			thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
-		},
-		"types.BoolArray": {
-			thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
-		},
-		"types.StringArray": {
-			thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
-		},
-		"types.Hstore": {
-			thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
-		},
-	}
-
-	return imp
-}
-
-// Remove an import matching the match string under the specified key.
-// Remove will search both standard and thirdParty import lists for a match.
-func (m mapImports) Remove(key string, match string) {
-	mp := m[key]
-	for idx := 0; idx < len(mp.standard); idx++ {
-		if mp.standard[idx] == match {
-			mp.standard[idx] = mp.standard[len(mp.standard)-1]
-			mp.standard = mp.standard[:len(mp.standard)-1]
-			break
-		}
-	}
-	for idx := 0; idx < len(mp.thirdParty); idx++ {
-		if mp.thirdParty[idx] == match {
-			mp.thirdParty[idx] = mp.thirdParty[len(mp.thirdParty)-1]
-			mp.thirdParty = mp.thirdParty[:len(mp.thirdParty)-1]
-			break
-		}
-	}
-
-	// delete the key and return if both import lists are empty
-	if len(mp.thirdParty) == 0 && len(mp.standard) == 0 {
-		delete(m, key)
-		return
-	}
-
-	m[key] = mp
-}
-
-// Add an import under the specified key. If the key does not exist, it
-// will be created.
-func (m mapImports) Add(key string, value string, thirdParty bool) {
-	mp := m[key]
-	if thirdParty {
-		mp.thirdParty = append(mp.thirdParty, value)
-	} else {
-		mp.standard = append(mp.standard, value)
-	}
-
-	m[key] = mp
-}
-
-// Remove an import matching the match string under the specified key.
-// Remove will search both standard and thirdParty import lists for a match.
-func (i *imports) Remove(match string) {
-	for idx := 0; idx < len(i.standard); idx++ {
-		if i.standard[idx] == match {
-			i.standard[idx] = i.standard[len(i.standard)-1]
-			i.standard = i.standard[:len(i.standard)-1]
-			break
-		}
-	}
-	for idx := 0; idx < len(i.thirdParty); idx++ {
-		if i.thirdParty[idx] == match {
-			i.thirdParty[idx] = i.thirdParty[len(i.thirdParty)-1]
-			i.thirdParty = i.thirdParty[:len(i.thirdParty)-1]
-			break
-		}
-	}
-}
-
-// Add an import under the specified key. If the key does not exist, it
-// will be created.
-func (i *imports) Add(value string, thirdParty bool) {
-	if thirdParty {
-		i.thirdParty = append(i.thirdParty, value)
-	} else {
-		i.standard = append(i.standard, value)
-	}
-}
diff --git a/boilingcore/imports_test.go b/boilingcore/imports_test.go
deleted file mode 100644
index d0043c0..0000000
--- a/boilingcore/imports_test.go
+++ /dev/null
@@ -1,395 +0,0 @@
-package boilingcore
-
-import (
-	"reflect"
-	"sort"
-	"testing"
-
-	"github.com/pkg/errors"
-	"github.com/lbryio/sqlboiler/bdb"
-)
-
-func TestImportsSort(t *testing.T) {
-	t.Parallel()
-
-	a1 := importList{
-		`"fmt"`,
-		`"errors"`,
-	}
-	a2 := importList{
-		`_ "github.com/lib/pq"`,
-		`_ "github.com/gorilla/n"`,
-		`"github.com/gorilla/mux"`,
-		`"github.com/gorilla/websocket"`,
-	}
-
-	a1Expected := importList{`"errors"`, `"fmt"`}
-	a2Expected := importList{
-		`"github.com/gorilla/mux"`,
-		`_ "github.com/gorilla/n"`,
-		`"github.com/gorilla/websocket"`,
-		`_ "github.com/lib/pq"`,
-	}
-
-	sort.Sort(a1)
-	if !reflect.DeepEqual(a1, a1Expected) {
-		t.Errorf("Expected a1 to match a1Expected, got: %v", a1)
-	}
-
-	for i, v := range a1 {
-		if v != a1Expected[i] {
-			t.Errorf("Expected a1[%d] to match a1Expected[%d]:\n%s\n%s\n", i, i, v, a1Expected[i])
-		}
-	}
-
-	sort.Sort(a2)
-	if !reflect.DeepEqual(a2, a2Expected) {
-		t.Errorf("Expected a2 to match a2expected, got: %v", a2)
-	}
-
-	for i, v := range a2 {
-		if v != a2Expected[i] {
-			t.Errorf("Expected a2[%d] to match a2Expected[%d]:\n%s\n%s\n", i, i, v, a1Expected[i])
-		}
-	}
-}
-
-func TestImportsAddAndRemove(t *testing.T) {
-	t.Parallel()
-
-	var imp imports
-	imp.Add("value", false)
-	if len(imp.standard) != 1 {
-		t.Errorf("expected len 1, got %d", len(imp.standard))
-	}
-	if imp.standard[0] != "value" {
-		t.Errorf("expected %q to be added", "value")
-	}
-	imp.Add("value2", true)
-	if len(imp.thirdParty) != 1 {
-		t.Errorf("expected len 1, got %d", len(imp.thirdParty))
-	}
-	if imp.thirdParty[0] != "value2" {
-		t.Errorf("expected %q to be added", "value2")
-	}
-
-	imp.Remove("value")
-	if len(imp.standard) != 0 {
-		t.Errorf("expected len 0, got %d", len(imp.standard))
-	}
-	imp.Remove("value")
-	if len(imp.standard) != 0 {
-		t.Errorf("expected len 0, got %d", len(imp.standard))
-	}
-	imp.Remove("value2")
-	if len(imp.thirdParty) != 0 {
-		t.Errorf("expected len 0, got %d", len(imp.thirdParty))
-	}
-
-	// Test deleting last element in len 2 slice
-	imp.Add("value3", false)
-	imp.Add("value4", false)
-	if len(imp.standard) != 2 {
-		t.Errorf("expected len 2, got %d", len(imp.standard))
-	}
-	imp.Remove("value4")
-	if len(imp.standard) != 1 {
-		t.Errorf("expected len 1, got %d", len(imp.standard))
-	}
-	if imp.standard[0] != "value3" {
-		t.Errorf("expected %q, got %q", "value3", imp.standard[0])
-	}
-	// Test deleting first element in len 2 slice
-	imp.Add("value4", false)
-	imp.Remove("value3")
-	if len(imp.standard) != 1 {
-		t.Errorf("expected len 1, got %d", len(imp.standard))
-	}
-	if imp.standard[0] != "value4" {
-		t.Errorf("expected %q, got %q", "value4", imp.standard[0])
-	}
-	imp.Remove("value2")
-	if len(imp.thirdParty) != 0 {
-		t.Errorf("expected len 0, got %d", len(imp.thirdParty))
-	}
-
-	// Test deleting last element in len 2 slice
-	imp.Add("value5", true)
-	imp.Add("value6", true)
-	if len(imp.thirdParty) != 2 {
-		t.Errorf("expected len 2, got %d", len(imp.thirdParty))
-	}
-	imp.Remove("value6")
-	if len(imp.thirdParty) != 1 {
-		t.Errorf("expected len 1, got %d", len(imp.thirdParty))
-	}
-	if imp.thirdParty[0] != "value5" {
-		t.Errorf("expected %q, got %q", "value5", imp.thirdParty[0])
-	}
-	// Test deleting first element in len 2 slice
-	imp.Add("value6", true)
-	imp.Remove("value5")
-	if len(imp.thirdParty) != 1 {
-		t.Errorf("expected len 1, got %d", len(imp.thirdParty))
-	}
-	if imp.thirdParty[0] != "value6" {
-		t.Errorf("expected %q, got %q", "value6", imp.thirdParty[0])
-	}
-}
-
-func TestMapImportsAddAndRemove(t *testing.T) {
-	t.Parallel()
-
-	imp := mapImports{}
-	imp.Add("cat", "value", false)
-	if len(imp["cat"].standard) != 1 {
-		t.Errorf("expected len 1, got %d", len(imp["cat"].standard))
-	}
-	if imp["cat"].standard[0] != "value" {
-		t.Errorf("expected %q to be added", "value")
-	}
-	imp.Add("cat", "value2", true)
-	if len(imp["cat"].thirdParty) != 1 {
-		t.Errorf("expected len 1, got %d", len(imp["cat"].thirdParty))
-	}
-	if imp["cat"].thirdParty[0] != "value2" {
-		t.Errorf("expected %q to be added", "value2")
-	}
-
-	imp.Remove("cat", "value")
-	if len(imp["cat"].standard) != 0 {
-		t.Errorf("expected len 0, got %d", len(imp["cat"].standard))
-	}
-	imp.Remove("cat", "value")
-	if len(imp["cat"].standard) != 0 {
-		t.Errorf("expected len 0, got %d", len(imp["cat"].standard))
-	}
-	imp.Remove("cat", "value2")
-	if len(imp["cat"].thirdParty) != 0 {
-		t.Errorf("expected len 0, got %d", len(imp["cat"].thirdParty))
-	}
-	// If there are no elements left in key, test key is deleted
-	_, ok := imp["cat"]
-	if ok {
-		t.Errorf("expected cat key to be deleted when list empty")
-	}
-
-	// Test deleting last element in len 2 slice
-	imp.Add("cat", "value3", false)
-	imp.Add("cat", "value4", false)
-	if len(imp["cat"].standard) != 2 {
-		t.Errorf("expected len 2, got %d", len(imp["cat"].standard))
-	}
-	imp.Remove("cat", "value4")
-	if len(imp["cat"].standard) != 1 {
-		t.Errorf("expected len 1, got %d", len(imp["cat"].standard))
-	}
-	if imp["cat"].standard[0] != "value3" {
-		t.Errorf("expected %q, got %q", "value3", imp["cat"].standard[0])
-	}
-	// Test deleting first element in len 2 slice
-	imp.Add("cat", "value4", false)
-	imp.Remove("cat", "value3")
-	if len(imp["cat"].standard) != 1 {
-		t.Errorf("expected len 1, got %d", len(imp["cat"].standard))
-	}
-	if imp["cat"].standard[0] != "value4" {
-		t.Errorf("expected %q, got %q", "value4", imp["cat"].standard[0])
-	}
-	imp.Remove("cat", "value2")
-	if len(imp["cat"].thirdParty) != 0 {
-		t.Errorf("expected len 0, got %d", len(imp["cat"].thirdParty))
-	}
-
-	// Test deleting last element in len 2 slice
-	imp.Add("dog", "value5", true)
-	imp.Add("dog", "value6", true)
-	if len(imp["dog"].thirdParty) != 2 {
-		t.Errorf("expected len 2, got %d", len(imp["dog"].thirdParty))
-	}
-	imp.Remove("dog", "value6")
-	if len(imp["dog"].thirdParty) != 1 {
-		t.Errorf("expected len 1, got %d", len(imp["dog"].thirdParty))
-	}
-	if imp["dog"].thirdParty[0] != "value5" {
-		t.Errorf("expected %q, got %q", "value5", imp["dog"].thirdParty[0])
-	}
-	// Test deleting first element in len 2 slice
-	imp.Add("dog", "value6", true)
-	imp.Remove("dog", "value5")
-	if len(imp["dog"].thirdParty) != 1 {
-		t.Errorf("expected len 1, got %d", len(imp["dog"].thirdParty))
-	}
-	if imp["dog"].thirdParty[0] != "value6" {
-		t.Errorf("expected %q, got %q", "value6", imp["dog"].thirdParty[0])
-	}
-}
-
-func TestCombineTypeImports(t *testing.T) {
-	t.Parallel()
-
-	imports1 := imports{
-		standard: importList{
-			`"errors"`,
-			`"fmt"`,
-		},
-		thirdParty: importList{
-			`"github.com/lbryio/sqlboiler/boil"`,
-		},
-	}
-
-	importsExpected := imports{
-		standard: importList{
-			`"errors"`,
-			`"fmt"`,
-			`"time"`,
-		},
-		thirdParty: importList{
-			`"github.com/lbryio/sqlboiler/boil"`,
-			`"github.com/lbryio/lbry.go/v2/extras/null"`,
-		},
-	}
-
-	cols := []bdb.Column{
-		{
-			Type: "null.Time",
-		},
-		{
-			Type: "null.Time",
-		},
-		{
-			Type: "time.Time",
-		},
-		{
-			Type: "null.Float",
-		},
-	}
-
-	imps := newImporter()
-
-	res1 := combineTypeImports(imports1, imps.BasedOnType, cols)
-
-	if !reflect.DeepEqual(res1, importsExpected) {
-		t.Errorf("Expected res1 to match importsExpected, got:\n\n%#v\n", res1)
-	}
-
-	imports2 := imports{
-		standard: importList{
-			`"errors"`,
-			`"fmt"`,
-			`"time"`,
-		},
-		thirdParty: importList{
-			`"github.com/lbryio/sqlboiler/boil"`,
-			`"github.com/lbryio/lbry.go/v2/extras/null"`,
-		},
-	}
-
-	res2 := combineTypeImports(imports2, imps.BasedOnType, cols)
-
-	if !reflect.DeepEqual(res2, importsExpected) {
-		t.Errorf("Expected res2 to match importsExpected, got:\n\n%#v\n", res1)
-	}
-}
-
-func TestCombineImports(t *testing.T) {
-	t.Parallel()
-
-	a := imports{
-		standard:   importList{"fmt"},
-		thirdParty: importList{"github.com/lbryio/sqlboiler", "github.com/lbryio/lbry.go/v2/extras/null"},
-	}
-	b := imports{
-		standard:   importList{"os"},
-		thirdParty: importList{"github.com/lbryio/sqlboiler"},
-	}
-
-	c := combineImports(a, b)
-
-	if c.standard[0] != "fmt" && c.standard[1] != "os" {
-		t.Errorf("Wanted: fmt, os got: %#v", c.standard)
-	}
-	if c.thirdParty[0] != "github.com/lbryio/sqlboiler" && c.thirdParty[1] != "github.com/lbryio/lbry.go/v2/extras/null" {
-		t.Errorf("Wanted: github.com/lbryio/sqlboiler, github.com/lbryio/lbry.go/v2/extras/null got: %#v", c.thirdParty)
-	}
-}
-
-func TestRemoveDuplicates(t *testing.T) {
-	t.Parallel()
-
-	hasDups := func(possible []string) error {
-		for i := 0; i < len(possible)-1; i++ {
-			for j := i + 1; j < len(possible); j++ {
-				if possible[i] == possible[j] {
-					return errors.Errorf("found duplicate: %s [%d] [%d]", possible[i], i, j)
-				}
-			}
-		}
-
-		return nil
-	}
-
-	if len(removeDuplicates([]string{})) != 0 {
-		t.Error("It should have returned an empty slice")
-	}
-
-	oneItem := []string{"patrick"}
-	slice := removeDuplicates(oneItem)
-	if ln := len(slice); ln != 1 {
-		t.Error("Length was wrong:", ln)
-	} else if oneItem[0] != slice[0] {
-		t.Errorf("Slices differ: %#v %#v", oneItem, slice)
-	}
-
-	slice = removeDuplicates([]string{"hello", "patrick", "hello"})
-	if ln := len(slice); ln != 2 {
-		t.Error("Length was wrong:", ln)
-	}
-	if err := hasDups(slice); err != nil {
-		t.Error(err)
-	}
-
-	slice = removeDuplicates([]string{"five", "patrick", "hello", "hello", "patrick", "hello", "hello"})
-	if ln := len(slice); ln != 3 {
-		t.Error("Length was wrong:", ln)
-	}
-	if err := hasDups(slice); err != nil {
-		t.Error(err)
-	}
-}
-
-func TestCombineStringSlices(t *testing.T) {
-	t.Parallel()
-
-	var a, b []string
-	slice := combineStringSlices(a, b)
-	if ln := len(slice); ln != 0 {
-		t.Error("Len was wrong:", ln)
-	}
-
-	a = []string{"1", "2"}
-	slice = combineStringSlices(a, b)
-	if ln := len(slice); ln != 2 {
-		t.Error("Len was wrong:", ln)
-	} else if slice[0] != a[0] || slice[1] != a[1] {
-		t.Errorf("Slice mismatch: %#v %#v", a, slice)
-	}
-
-	b = a
-	a = nil
-	slice = combineStringSlices(a, b)
-	if ln := len(slice); ln != 2 {
-		t.Error("Len was wrong:", ln)
-	} else if slice[0] != b[0] || slice[1] != b[1] {
-		t.Errorf("Slice mismatch: %#v %#v", b, slice)
-	}
-
-	a = b
-	b = []string{"3", "4"}
-	slice = combineStringSlices(a, b)
-	if ln := len(slice); ln != 4 {
-		t.Error("Len was wrong:", ln)
-	} else if slice[0] != a[0] || slice[1] != a[1] || slice[2] != b[0] || slice[3] != b[1] {
-		t.Errorf("Slice mismatch: %#v + %#v != #%v", a, b, slice)
-	}
-}
diff --git a/circle.yml b/circle.yml
new file mode 100644
index 0000000..ed75a12
--- /dev/null
+++ b/circle.yml
@@ -0,0 +1,28 @@
+test:
+  pre:
+    - mkdir -p /home/ubuntu/.go_workspace/src/github.com/jstemmer
+    - go get -u github.com/jstemmer/go-junit-report
+    - echo -e "[postgres]\nhost=\"localhost\"\nport=5432\nuser=\"ubuntu\"\ndbname=\"sqlboiler\"\n[mysql]\nhost=\"localhost\"\nport=3306\nuser=\"ubuntu\"\ndbname=\"sqlboiler\"\nsslmode=\"false\"" > sqlboiler.toml
+    - createdb -U ubuntu sqlboiler
+    - psql -U ubuntu sqlboiler < ./testdata/postgres_test_schema.sql
+    - echo "create database sqlboiler;" | mysql -u ubuntu
+    - mysql -u ubuntu sqlboiler < ./testdata/mysql_test_schema.sql
+    - ./sqlboiler postgres -o "postgres"
+    - ./sqlboiler mysql -o "mysql"
+  override:
+    - go test -v -race ./... > $CIRCLE_ARTIFACTS/gotest.txt
+  post:
+    - cat $CIRCLE_ARTIFACTS/gotest.txt | go-junit-report > $CIRCLE_TEST_REPORTS/junit.xml
+
+machine:
+  environment:
+    GODIST: "go1.7.linux-amd64.tar.gz"
+  post:
+    - mkdir -p download
+    - test -e download/$GODIST || curl -o download/$GODIST https://storage.googleapis.com/golang/$GODIST
+    - sudo rm -rf /usr/local/go
+    - sudo tar -C /usr/local -xzf download/$GODIST
+
+dependencies:
+  cache_directories:
+    - ~/download
diff --git a/boilingcore/config.go b/config.go
similarity index 74%
rename from boilingcore/config.go
rename to config.go
index 63b0552..da28134 100644
--- a/boilingcore/config.go
+++ b/config.go
@@ -1,4 +1,4 @@
-package boilingcore
+package main
 
 // Config for the running of the commands
 type Config struct {
@@ -10,16 +10,13 @@ type Config struct {
 	WhitelistTables  []string
 	BlacklistTables  []string
 	Tags             []string
-	Replacements     []string
 	Debug            bool
 	NoTests          bool
 	NoHooks          bool
 	NoAutoTimestamps bool
-	Wipe             bool
 
 	Postgres PostgresConfig
 	MySQL    MySQLConfig
-	MSSQL    MSSQLConfig
 }
 
 // PostgresConfig configures a postgres database
@@ -41,13 +38,3 @@ type MySQLConfig struct {
 	DBName  string
 	SSLMode string
 }
-
-// MSSQLConfig configures a mysql database
-type MSSQLConfig struct {
-	User    string
-	Pass    string
-	Host    string
-	Port    int
-	DBName  string
-	SSLMode string
-}
diff --git a/imports.go b/imports.go
new file mode 100644
index 0000000..880ba77
--- /dev/null
+++ b/imports.go
@@ -0,0 +1,348 @@
+package main
+
+import (
+	"bytes"
+	"fmt"
+	"sort"
+	"strings"
+
+	"github.com/vattle/sqlboiler/bdb"
+)
+
+// imports defines the optional standard imports and
+// thirdParty imports (from github for example)
+type imports struct {
+	standard   importList
+	thirdParty importList
+}
+
+// importList is a list of import names
+type importList []string
+
+func (i importList) Len() int {
+	return len(i)
+}
+
+func (i importList) Swap(k, j int) {
+	i[k], i[j] = i[j], i[k]
+}
+
+func (i importList) Less(k, j int) bool {
+	res := strings.Compare(strings.TrimLeft(i[k], "_ "), strings.TrimLeft(i[j], "_ "))
+	if res <= 0 {
+		return true
+	}
+
+	return false
+}
+
+func combineImports(a, b imports) imports {
+	var c imports
+
+	c.standard = removeDuplicates(combineStringSlices(a.standard, b.standard))
+	c.thirdParty = removeDuplicates(combineStringSlices(a.thirdParty, b.thirdParty))
+
+	sort.Sort(c.standard)
+	sort.Sort(c.thirdParty)
+
+	return c
+}
+
+func combineTypeImports(a imports, b map[string]imports, columns []bdb.Column) imports {
+	tmpImp := imports{
+		standard:   make(importList, len(a.standard)),
+		thirdParty: make(importList, len(a.thirdParty)),
+	}
+
+	copy(tmpImp.standard, a.standard)
+	copy(tmpImp.thirdParty, a.thirdParty)
+
+	for _, col := range columns {
+		for key, imp := range b {
+			if col.Type == key {
+				tmpImp.standard = append(tmpImp.standard, imp.standard...)
+				tmpImp.thirdParty = append(tmpImp.thirdParty, imp.thirdParty...)
+			}
+		}
+	}
+
+	tmpImp.standard = removeDuplicates(tmpImp.standard)
+	tmpImp.thirdParty = removeDuplicates(tmpImp.thirdParty)
+
+	sort.Sort(tmpImp.standard)
+	sort.Sort(tmpImp.thirdParty)
+
+	return tmpImp
+}
+
+func buildImportString(imps imports) []byte {
+	stdlen, thirdlen := len(imps.standard), len(imps.thirdParty)
+	if stdlen+thirdlen < 1 {
+		return []byte{}
+	}
+
+	if stdlen+thirdlen == 1 {
+		var imp string
+		if stdlen == 1 {
+			imp = imps.standard[0]
+		} else {
+			imp = imps.thirdParty[0]
+		}
+		return []byte(fmt.Sprintf("import %s", imp))
+	}
+
+	buf := &bytes.Buffer{}
+	buf.WriteString("import (")
+	for _, std := range imps.standard {
+		fmt.Fprintf(buf, "\n\t%s", std)
+	}
+	if stdlen != 0 && thirdlen != 0 {
+		buf.WriteString("\n")
+	}
+	for _, third := range imps.thirdParty {
+		fmt.Fprintf(buf, "\n\t%s", third)
+	}
+	buf.WriteString("\n)\n")
+
+	return buf.Bytes()
+}
+
+func combineStringSlices(a, b []string) []string {
+	c := make([]string, len(a)+len(b))
+	if len(a) > 0 {
+		copy(c, a)
+	}
+	if len(b) > 0 {
+		copy(c[len(a):], b)
+	}
+
+	return c
+}
+
+func removeDuplicates(dedup []string) []string {
+	if len(dedup) <= 1 {
+		return dedup
+	}
+
+	for i := 0; i < len(dedup)-1; i++ {
+		for j := i + 1; j < len(dedup); j++ {
+			if dedup[i] != dedup[j] {
+				continue
+			}
+
+			if j != len(dedup)-1 {
+				dedup[j] = dedup[len(dedup)-1]
+				j--
+			}
+			dedup = dedup[:len(dedup)-1]
+		}
+	}
+
+	return dedup
+}
+
+var defaultTemplateImports = imports{
+	standard: importList{
+		`"bytes"`,
+		`"database/sql"`,
+		`"fmt"`,
+		`"reflect"`,
+		`"strings"`,
+		`"sync"`,
+		`"time"`,
+	},
+	thirdParty: importList{
+		`"github.com/pkg/errors"`,
+		`"github.com/vattle/sqlboiler/boil"`,
+		`"github.com/vattle/sqlboiler/queries"`,
+		`"github.com/vattle/sqlboiler/queries/qm"`,
+		`"github.com/vattle/sqlboiler/strmangle"`,
+	},
+}
+
+var defaultSingletonTemplateImports = map[string]imports{
+	"boil_queries": {
+		thirdParty: importList{
+			`"github.com/vattle/sqlboiler/boil"`,
+			`"github.com/vattle/sqlboiler/queries"`,
+			`"github.com/vattle/sqlboiler/queries/qm"`,
+		},
+	},
+	"boil_types": {
+		thirdParty: importList{
+			`"github.com/pkg/errors"`,
+			`"github.com/vattle/sqlboiler/strmangle"`,
+		},
+	},
+}
+
+var defaultTestTemplateImports = imports{
+	standard: importList{
+		`"bytes"`,
+		`"reflect"`,
+		`"testing"`,
+	},
+	thirdParty: importList{
+		`"github.com/vattle/sqlboiler/boil"`,
+		`"github.com/vattle/sqlboiler/randomize"`,
+		`"github.com/vattle/sqlboiler/strmangle"`,
+	},
+}
+
+var defaultSingletonTestTemplateImports = map[string]imports{
+	"boil_main_test": {
+		standard: importList{
+			`"database/sql"`,
+			`"flag"`,
+			`"fmt"`,
+			`"math/rand"`,
+			`"os"`,
+			`"path/filepath"`,
+			`"testing"`,
+			`"time"`,
+		},
+		thirdParty: importList{
+			`"github.com/kat-co/vala"`,
+			`"github.com/pkg/errors"`,
+			`"github.com/spf13/viper"`,
+			`"github.com/vattle/sqlboiler/boil"`,
+		},
+	},
+	"boil_queries_test": {
+		standard: importList{
+			`"bytes"`,
+			`"fmt"`,
+			`"io"`,
+			`"io/ioutil"`,
+			`"math/rand"`,
+			`"regexp"`,
+		},
+		thirdParty: importList{
+			`"github.com/vattle/sqlboiler/boil"`,
+		},
+	},
+	"boil_suites_test": {
+		standard: importList{
+			`"testing"`,
+		},
+	},
+}
+
+var defaultTestMainImports = map[string]imports{
+	"postgres": {
+		standard: importList{
+			`"bytes"`,
+			`"database/sql"`,
+			`"fmt"`,
+			`"io"`,
+			`"io/ioutil"`,
+			`"os"`,
+			`"os/exec"`,
+			`"strings"`,
+		},
+		thirdParty: importList{
+			`"github.com/pkg/errors"`,
+			`"github.com/spf13/viper"`,
+			`"github.com/vattle/sqlboiler/bdb/drivers"`,
+			`"github.com/vattle/sqlboiler/randomize"`,
+			`_ "github.com/lib/pq"`,
+		},
+	},
+	"mysql": {
+		standard: importList{
+			`"bytes"`,
+			`"database/sql"`,
+			`"fmt"`,
+			`"io"`,
+			`"io/ioutil"`,
+			`"os"`,
+			`"os/exec"`,
+			`"strings"`,
+		},
+		thirdParty: importList{
+			`"github.com/pkg/errors"`,
+			`"github.com/spf13/viper"`,
+			`"github.com/vattle/sqlboiler/bdb/drivers"`,
+			`"github.com/vattle/sqlboiler/randomize"`,
+			`_ "github.com/go-sql-driver/mysql"`,
+		},
+	},
+}
+
+// importsBasedOnType imports are only included in the template output if the
+// database requires one of the following special types. Check
+// TranslateColumnType to see the type assignments.
+var importsBasedOnType = map[string]imports{
+	"null.Float32": {
+		thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
+	},
+	"null.Float64": {
+		thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
+	},
+	"null.Int": {
+		thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
+	},
+	"null.Int8": {
+		thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
+	},
+	"null.Int16": {
+		thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
+	},
+	"null.Int32": {
+		thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
+	},
+	"null.Int64": {
+		thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
+	},
+	"null.Uint": {
+		thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
+	},
+	"null.Uint8": {
+		thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
+	},
+	"null.Uint16": {
+		thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
+	},
+	"null.Uint32": {
+		thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
+	},
+	"null.Uint64": {
+		thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
+	},
+	"null.String": {
+		thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
+	},
+	"null.Bool": {
+		thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
+	},
+	"null.Time": {
+		thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
+	},
+	"null.JSON": {
+		thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
+	},
+	"null.Bytes": {
+		thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
+	},
+	"time.Time": {
+		standard: importList{`"time"`},
+	},
+	"types.JSON": {
+		thirdParty: importList{`"github.com/vattle/sqlboiler/types"`},
+	},
+	"types.BytesArray": {
+		thirdParty: importList{`"github.com/vattle/sqlboiler/types"`},
+	},
+	"types.Int64Array": {
+		thirdParty: importList{`"github.com/vattle/sqlboiler/types"`},
+	},
+	"types.Float64Array": {
+		thirdParty: importList{`"github.com/vattle/sqlboiler/types"`},
+	},
+	"types.BoolArray": {
+		thirdParty: importList{`"github.com/vattle/sqlboiler/types"`},
+	},
+	"types.Hstore": {
+		thirdParty: importList{`"github.com/vattle/sqlboiler/types"`},
+	},
+}
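
Note: importList.Less above strips the blank-identifier prefix before comparing, so `_ "github.com/lib/pq"` sorts as if it were the plain import path; the expectations in imports_test.go below rely on that. A tiny standalone illustration of the ordering rule, using sort.Slice with a strict less instead of the importList methods:

package main

import (
	"fmt"
	"sort"
	"strings"
)

func main() {
	imps := []string{
		`_ "github.com/lib/pq"`,
		`"github.com/gorilla/mux"`,
		`_ "github.com/gorilla/n"`,
	}

	// Same key as importList.Less: strip the leading `_ ` before comparing.
	// sort.Slice wants a strict less, so `< 0` is used here rather than `<= 0`.
	sort.Slice(imps, func(k, j int) bool {
		return strings.Compare(strings.TrimLeft(imps[k], "_ "), strings.TrimLeft(imps[j], "_ ")) < 0
	})

	fmt.Println(imps)
	// ["github.com/gorilla/mux" _ "github.com/gorilla/n" _ "github.com/lib/pq"]
}
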
diff --git a/imports_test.go b/imports_test.go
new file mode 100644
index 0000000..628d955
--- /dev/null
+++ b/imports_test.go
@@ -0,0 +1,222 @@
+package main
+
+import (
+	"reflect"
+	"sort"
+	"testing"
+
+	"github.com/pkg/errors"
+	"github.com/vattle/sqlboiler/bdb"
+)
+
+func TestImportsSort(t *testing.T) {
+	t.Parallel()
+
+	a1 := importList{
+		`"fmt"`,
+		`"errors"`,
+	}
+	a2 := importList{
+		`_ "github.com/lib/pq"`,
+		`_ "github.com/gorilla/n"`,
+		`"github.com/gorilla/mux"`,
+		`"github.com/gorilla/websocket"`,
+	}
+
+	a1Expected := importList{`"errors"`, `"fmt"`}
+	a2Expected := importList{
+		`"github.com/gorilla/mux"`,
+		`_ "github.com/gorilla/n"`,
+		`"github.com/gorilla/websocket"`,
+		`_ "github.com/lib/pq"`,
+	}
+
+	sort.Sort(a1)
+	if !reflect.DeepEqual(a1, a1Expected) {
+		t.Errorf("Expected a1 to match a1Expected, got: %v", a1)
+	}
+
+	for i, v := range a1 {
+		if v != a1Expected[i] {
+			t.Errorf("Expected a1[%d] to match a1Expected[%d]:\n%s\n%s\n", i, i, v, a1Expected[i])
+		}
+	}
+
+	sort.Sort(a2)
+	if !reflect.DeepEqual(a2, a2Expected) {
+		t.Errorf("Expected a2 to match a2expected, got: %v", a2)
+	}
+
+	for i, v := range a2 {
+		if v != a2Expected[i] {
+			t.Errorf("Expected a2[%d] to match a2Expected[%d]:\n%s\n%s\n", i, i, v, a2Expected[i])
+		}
+	}
+}
+
+func TestCombineTypeImports(t *testing.T) {
+	t.Parallel()
+
+	imports1 := imports{
+		standard: importList{
+			`"errors"`,
+			`"fmt"`,
+		},
+		thirdParty: importList{
+			`"github.com/vattle/sqlboiler/boil"`,
+		},
+	}
+
+	importsExpected := imports{
+		standard: importList{
+			`"errors"`,
+			`"fmt"`,
+			`"time"`,
+		},
+		thirdParty: importList{
+			`"github.com/vattle/sqlboiler/boil"`,
+			`"gopkg.in/nullbio/null.v6"`,
+		},
+	}
+
+	cols := []bdb.Column{
+		{
+			Type: "null.Time",
+		},
+		{
+			Type: "null.Time",
+		},
+		{
+			Type: "time.Time",
+		},
+		{
+			Type: "null.Float",
+		},
+	}
+
+	res1 := combineTypeImports(imports1, importsBasedOnType, cols)
+
+	if !reflect.DeepEqual(res1, importsExpected) {
+		t.Errorf("Expected res1 to match importsExpected, got:\n\n%#v\n", res1)
+	}
+
+	imports2 := imports{
+		standard: importList{
+			`"errors"`,
+			`"fmt"`,
+			`"time"`,
+		},
+		thirdParty: importList{
+			`"github.com/vattle/sqlboiler/boil"`,
+			`"gopkg.in/nullbio/null.v6"`,
+		},
+	}
+
+	res2 := combineTypeImports(imports2, importsBasedOnType, cols)
+
+	if !reflect.DeepEqual(res2, importsExpected) {
+		t.Errorf("Expected res2 to match importsExpected, got:\n\n%#v\n", res2)
+	}
+}
+
+func TestCombineImports(t *testing.T) {
+	t.Parallel()
+
+	a := imports{
+		standard:   importList{"fmt"},
+		thirdParty: importList{"github.com/vattle/sqlboiler", "gopkg.in/nullbio/null.v6"},
+	}
+	b := imports{
+		standard:   importList{"os"},
+		thirdParty: importList{"github.com/vattle/sqlboiler"},
+	}
+
+	c := combineImports(a, b)
+
+	if c.standard[0] != "fmt" && c.standard[1] != "os" {
+		t.Errorf("Wanted: fmt, os got: %#v", c.standard)
+	}
+	if c.thirdParty[0] != "github.com/vattle/sqlboiler" && c.thirdParty[1] != "gopkg.in/nullbio/null.v6" {
+		t.Errorf("Wanted: github.com/vattle/sqlboiler, gopkg.in/nullbio/null.v6 got: %#v", c.thirdParty)
+	}
+}
+
+func TestRemoveDuplicates(t *testing.T) {
+	t.Parallel()
+
+	hasDups := func(possible []string) error {
+		for i := 0; i < len(possible)-1; i++ {
+			for j := i + 1; j < len(possible); j++ {
+				if possible[i] == possible[j] {
+					return errors.Errorf("found duplicate: %s [%d] [%d]", possible[i], i, j)
+				}
+			}
+		}
+
+		return nil
+	}
+
+	if len(removeDuplicates([]string{})) != 0 {
+		t.Error("It should have returned an empty slice")
+	}
+
+	oneItem := []string{"patrick"}
+	slice := removeDuplicates(oneItem)
+	if ln := len(slice); ln != 1 {
+		t.Error("Length was wrong:", ln)
+	} else if oneItem[0] != slice[0] {
+		t.Errorf("Slices differ: %#v %#v", oneItem, slice)
+	}
+
+	slice = removeDuplicates([]string{"hello", "patrick", "hello"})
+	if ln := len(slice); ln != 2 {
+		t.Error("Length was wrong:", ln)
+	}
+	if err := hasDups(slice); err != nil {
+		t.Error(err)
+	}
+
+	slice = removeDuplicates([]string{"five", "patrick", "hello", "hello", "patrick", "hello", "hello"})
+	if ln := len(slice); ln != 3 {
+		t.Error("Length was wrong:", ln)
+	}
+	if err := hasDups(slice); err != nil {
+		t.Error(err)
+	}
+}
+
+func TestCombineStringSlices(t *testing.T) {
+	t.Parallel()
+
+	var a, b []string
+	slice := combineStringSlices(a, b)
+	if ln := len(slice); ln != 0 {
+		t.Error("Len was wrong:", ln)
+	}
+
+	a = []string{"1", "2"}
+	slice = combineStringSlices(a, b)
+	if ln := len(slice); ln != 2 {
+		t.Error("Len was wrong:", ln)
+	} else if slice[0] != a[0] || slice[1] != a[1] {
+		t.Errorf("Slice mismatch: %#v %#v", a, slice)
+	}
+
+	b = a
+	a = nil
+	slice = combineStringSlices(a, b)
+	if ln := len(slice); ln != 2 {
+		t.Error("Len was wrong:", ln)
+	} else if slice[0] != b[0] || slice[1] != b[1] {
+		t.Errorf("Slice mismatch: %#v %#v", b, slice)
+	}
+
+	a = b
+	b = []string{"3", "4"}
+	slice = combineStringSlices(a, b)
+	if ln := len(slice); ln != 4 {
+		t.Error("Len was wrong:", ln)
+	} else if slice[0] != a[0] || slice[1] != a[1] || slice[2] != b[0] || slice[3] != b[1] {
+		t.Errorf("Slice mismatch: %#v + %#v != #%v", a, b, slice)
+	}
+}
diff --git a/main.go b/main.go
index 8344e57..cf68812 100644
--- a/main.go
+++ b/main.go
@@ -8,17 +8,15 @@ import (
 	"strings"
 
 	"github.com/kat-co/vala"
-	"github.com/lbryio/sqlboiler/bdb/drivers"
-	"github.com/lbryio/sqlboiler/boilingcore"
 	"github.com/spf13/cobra"
 	"github.com/spf13/viper"
 )
 
-const sqlBoilerVersion = "2.4.0+lbry"
+const sqlBoilerVersion = "2.1.1"
 
 var (
-	cmdState  *boilingcore.State
-	cmdConfig *boilingcore.Config
+	cmdState  *State
+	cmdConfig *Config
 )
 
 func main() {
@@ -62,7 +60,7 @@ func main() {
 		Use:   "sqlboiler [flags] <driver>",
 		Short: "SQL Boiler generates an ORM tailored to your database schema.",
 		Long: "SQL Boiler generates a Go ORM from template files, tailored to your database schema.\n" +
-			`Complete documentation is available at http://github.com/lbryio/sqlboiler`,
+			`Complete documentation is available at http://github.com/vattle/sqlboiler`,
 		Example:       `sqlboiler postgres`,
 		PreRunE:       preRun,
 		RunE:          run,
@@ -73,30 +71,22 @@ func main() {
 
 	// Set up the cobra root command flags
 	rootCmd.PersistentFlags().StringP("output", "o", "models", "The name of the folder to output to")
-	rootCmd.PersistentFlags().StringP("schema", "s", "", "schema name for drivers that support it (default psql: public, mssql: dbo)")
+	rootCmd.PersistentFlags().StringP("schema", "s", "public", "The name of your database schema, for databases that support real schemas")
 	rootCmd.PersistentFlags().StringP("pkgname", "p", "models", "The name you wish to assign to your generated package")
 	rootCmd.PersistentFlags().StringP("basedir", "", "", "The base directory has the templates and templates_test folders")
 	rootCmd.PersistentFlags().StringSliceP("blacklist", "b", nil, "Do not include these tables in your generated package")
 	rootCmd.PersistentFlags().StringSliceP("whitelist", "w", nil, "Only include these tables in your generated package")
 	rootCmd.PersistentFlags().StringSliceP("tag", "t", nil, "Struct tags to be included on your models in addition to json, yaml, toml")
-	rootCmd.PersistentFlags().StringSliceP("replace", "", nil, "Replace templates by directory: relpath/to_file.tpl:relpath/to_replacement.tpl")
 	rootCmd.PersistentFlags().BoolP("debug", "d", false, "Debug mode prints stack traces on error")
 	rootCmd.PersistentFlags().BoolP("no-tests", "", false, "Disable generated go test files")
 	rootCmd.PersistentFlags().BoolP("no-hooks", "", false, "Disable hooks feature for your models")
 	rootCmd.PersistentFlags().BoolP("no-auto-timestamps", "", false, "Disable automatic timestamps for created_at/updated_at")
 	rootCmd.PersistentFlags().BoolP("version", "", false, "Print the version")
-	rootCmd.PersistentFlags().BoolP("tinyint-as-bool", "", false, "Map MySQL tinyint(1) in Go to bool instead of int8")
-	rootCmd.PersistentFlags().BoolP("wipe", "", false, "Delete the output folder (rm -rf) before generation to ensure sanity")
-
-	// hide flags not recommended for use
-	rootCmd.PersistentFlags().MarkHidden("replace")
 
 	viper.SetDefault("postgres.sslmode", "require")
 	viper.SetDefault("postgres.port", "5432")
 	viper.SetDefault("mysql.sslmode", "true")
 	viper.SetDefault("mysql.port", "3306")
-	viper.SetDefault("mssql.sslmode", "true")
-	viper.SetDefault("mssql.port", "1433")
 
 	viper.BindPFlags(rootCmd.PersistentFlags())
 	viper.AutomaticEnv()
@@ -130,7 +120,7 @@ func preRun(cmd *cobra.Command, args []string) error {
 
 	driverName := args[0]
 
-	cmdConfig = &boilingcore.Config{
+	cmdConfig = &Config{
 		DriverName:       driverName,
 		OutFolder:        viper.GetString("output"),
 		Schema:           viper.GetString("schema"),
@@ -140,7 +130,6 @@ func preRun(cmd *cobra.Command, args []string) error {
 		NoTests:          viper.GetBool("no-tests"),
 		NoHooks:          viper.GetBool("no-hooks"),
 		NoAutoTimestamps: viper.GetBool("no-auto-timestamps"),
-		Wipe:             viper.GetBool("wipe"),
 	}
 
 	// BUG: https://github.com/spf13/viper/issues/200
@@ -148,7 +137,7 @@ func preRun(cmd *cobra.Command, args []string) error {
 	// detect a malformed value coming out of viper.
 	// Once the bug is fixed we'll be able to move this into the init above
 	cmdConfig.BlacklistTables = viper.GetStringSlice("blacklist")
-	if len(cmdConfig.BlacklistTables) == 1 && strings.ContainsRune(cmdConfig.BlacklistTables[0], ',') {
+	if len(cmdConfig.BlacklistTables) == 1 && strings.HasPrefix(cmdConfig.BlacklistTables[0], "[") {
 		cmdConfig.BlacklistTables, err = cmd.PersistentFlags().GetStringSlice("blacklist")
 		if err != nil {
 			return err
@@ -156,7 +145,7 @@ func preRun(cmd *cobra.Command, args []string) error {
 	}
 
 	cmdConfig.WhitelistTables = viper.GetStringSlice("whitelist")
-	if len(cmdConfig.WhitelistTables) == 1 && strings.ContainsRune(cmdConfig.WhitelistTables[0], ',') {
+	if len(cmdConfig.WhitelistTables) == 1 && strings.HasPrefix(cmdConfig.WhitelistTables[0], "[") {
 		cmdConfig.WhitelistTables, err = cmd.PersistentFlags().GetStringSlice("whitelist")
 		if err != nil {
 			return err
@@ -164,23 +153,15 @@ func preRun(cmd *cobra.Command, args []string) error {
 	}
 
 	cmdConfig.Tags = viper.GetStringSlice("tag")
-	if len(cmdConfig.Tags) == 1 && strings.ContainsRune(cmdConfig.Tags[0], ',') {
+	if len(cmdConfig.Tags) == 1 && strings.HasPrefix(cmdConfig.Tags[0], "[") {
 		cmdConfig.Tags, err = cmd.PersistentFlags().GetStringSlice("tag")
 		if err != nil {
 			return err
 		}
 	}
 
-	cmdConfig.Replacements = viper.GetStringSlice("replace")
-	if len(cmdConfig.Replacements) == 1 && strings.ContainsRune(cmdConfig.Replacements[0], ',') {
-		cmdConfig.Replacements, err = cmd.PersistentFlags().GetStringSlice("replace")
-		if err != nil {
-			return err
-		}
-	}
-
 	if driverName == "postgres" {
-		cmdConfig.Postgres = boilingcore.PostgresConfig{
+		cmdConfig.Postgres = PostgresConfig{
 			User:    viper.GetString("postgres.user"),
 			Pass:    viper.GetString("postgres.pass"),
 			Host:    viper.GetString("postgres.host"),
@@ -202,10 +183,6 @@ func preRun(cmd *cobra.Command, args []string) error {
 			viper.Set("postgres.port", cmdConfig.Postgres.Port)
 		}
 
-		if len(cmdConfig.Schema) == 0 {
-			cmdConfig.Schema = "public"
-		}
-
 		err = vala.BeginValidation().Validate(
 			vala.StringNotEmpty(cmdConfig.Postgres.User, "postgres.user"),
 			vala.StringNotEmpty(cmdConfig.Postgres.Host, "postgres.host"),
@@ -220,7 +197,7 @@ func preRun(cmd *cobra.Command, args []string) error {
 	}
 
 	if driverName == "mysql" {
-		cmdConfig.MySQL = boilingcore.MySQLConfig{
+		cmdConfig.MySQL = MySQLConfig{
 			User:    viper.GetString("mysql.user"),
 			Pass:    viper.GetString("mysql.pass"),
 			Host:    viper.GetString("mysql.host"),
@@ -229,9 +206,6 @@ func preRun(cmd *cobra.Command, args []string) error {
 			SSLMode: viper.GetString("mysql.sslmode"),
 		}
 
-		// Set MySQL TinyintAsBool global var. This flag only applies to MySQL.
-		drivers.TinyintAsBool = viper.GetBool("tinyint-as-bool")
-
 		// MySQL doesn't have schemas, just databases
 		cmdConfig.Schema = cmdConfig.MySQL.DBName
 
@@ -261,47 +235,7 @@ func preRun(cmd *cobra.Command, args []string) error {
 		}
 	}
 
-	if driverName == "mssql" {
-		cmdConfig.MSSQL = boilingcore.MSSQLConfig{
-			User:    viper.GetString("mssql.user"),
-			Pass:    viper.GetString("mssql.pass"),
-			Host:    viper.GetString("mssql.host"),
-			Port:    viper.GetInt("mssql.port"),
-			DBName:  viper.GetString("mssql.dbname"),
-			SSLMode: viper.GetString("mssql.sslmode"),
-		}
-
-		// BUG: https://github.com/spf13/viper/issues/71
-		// Despite setting defaults, nested values don't get defaults
-		// Set them manually
-		if cmdConfig.MSSQL.SSLMode == "" {
-			cmdConfig.MSSQL.SSLMode = "true"
-			viper.Set("mssql.sslmode", cmdConfig.MSSQL.SSLMode)
-		}
-
-		if cmdConfig.MSSQL.Port == 0 {
-			cmdConfig.MSSQL.Port = 1433
-			viper.Set("mssql.port", cmdConfig.MSSQL.Port)
-		}
-
-		if len(cmdConfig.Schema) == 0 {
-			cmdConfig.Schema = "dbo"
-		}
-
-		err = vala.BeginValidation().Validate(
-			vala.StringNotEmpty(cmdConfig.MSSQL.User, "mssql.user"),
-			vala.StringNotEmpty(cmdConfig.MSSQL.Host, "mssql.host"),
-			vala.Not(vala.Equals(cmdConfig.MSSQL.Port, 0, "mssql.port")),
-			vala.StringNotEmpty(cmdConfig.MSSQL.DBName, "mssql.dbname"),
-			vala.StringNotEmpty(cmdConfig.MSSQL.SSLMode, "mssql.sslmode"),
-		).Check()
-
-		if err != nil {
-			return commandFailure(err.Error())
-		}
-	}
-
-	cmdState, err = boilingcore.New(cmdConfig)
+	cmdState, err = New(cmdConfig)
 	return err
 }
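
Note: the HasPrefix checks above implement the workaround for the viper issue referenced in the BUG comments: when the configured value comes back as one unparsed element, the code falls back to reading the flag directly. A standalone sketch of that pattern, with fromConfig/fromFlags standing in for viper.GetStringSlice and the cobra flag lookup (the "[users,videos]" value is an assumption inferred from the prefix check, not taken from viper's documentation):

package main

import (
	"fmt"
	"strings"
)

// pickStringSlice prefers the value read from config, but falls back to the
// flag lookup when the config value arrives as a single element that still
// looks like the unparsed "[a,b]" form.
func pickStringSlice(fromConfig []string, fromFlags func() ([]string, error)) ([]string, error) {
	if len(fromConfig) == 1 && strings.HasPrefix(fromConfig[0], "[") {
		return fromFlags()
	}
	return fromConfig, nil
}

func main() {
	got, _ := pickStringSlice([]string{"[users,videos]"}, func() ([]string, error) {
		return []string{"users", "videos"}, nil
	})
	fmt.Println(got) // [users videos]
}
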
 
diff --git a/boilingcore/output.go b/output.go
similarity index 86%
rename from boilingcore/output.go
rename to output.go
index 5c65634..d6914c3 100644
--- a/boilingcore/output.go
+++ b/output.go
@@ -1,4 +1,4 @@
-package boilingcore
+package main
 
 import (
 	"bufio"
@@ -14,12 +14,6 @@ import (
 	"github.com/pkg/errors"
 )
 
-var noEditDisclaimer = []byte(`// This file is generated by SQLBoiler (https://github.com/lbryio/sqlboiler)
-// and is meant to be re-generated in place and/or deleted at any time.
-// DO NOT EDIT
-
-`)
-
 var (
 	// templateByteBuffer is re-used by all template construction to avoid
 	// allocating more memory than is needed. This will later be a problem for
@@ -38,7 +32,7 @@ func generateOutput(state *State, data *templateData) error {
 		state:                state,
 		data:                 data,
 		templates:            state.Templates,
-		importSet:            state.Importer.Standard,
+		importSet:            defaultTemplateImports,
 		combineImportsOnType: true,
 		fileSuffix:           ".go",
 	})
@@ -50,7 +44,7 @@ func generateTestOutput(state *State, data *templateData) error {
 		state:                state,
 		data:                 data,
 		templates:            state.TestTemplates,
-		importSet:            state.Importer.TestStandard,
+		importSet:            defaultTestTemplateImports,
 		combineImportsOnType: false,
 		fileSuffix:           "_test.go",
 	})
@@ -63,7 +57,7 @@ func generateSingletonOutput(state *State, data *templateData) error {
 		state:          state,
 		data:           data,
 		templates:      state.SingletonTemplates,
-		importNamedSet: state.Importer.Singleton,
+		importNamedSet: defaultSingletonTemplateImports,
 		fileSuffix:     ".go",
 	})
 }
@@ -75,7 +69,7 @@ func generateSingletonTestOutput(state *State, data *templateData) error {
 		state:          state,
 		data:           data,
 		templates:      state.SingletonTestTemplates,
-		importNamedSet: state.Importer.TestSingleton,
+		importNamedSet: defaultSingletonTestTemplateImports,
 		fileSuffix:     ".go",
 	})
 }
@@ -106,10 +100,9 @@ func executeTemplates(e executeTemplateData) error {
 	imps.standard = e.importSet.standard
 	imps.thirdParty = e.importSet.thirdParty
 	if e.combineImportsOnType {
-		imps = combineTypeImports(imps, e.state.Importer.BasedOnType, e.data.Table.Columns)
+		imps = combineTypeImports(imps, importsBasedOnType, e.data.Table.Columns)
 	}
 
-	writeFileDisclaimer(out)
 	writePackageName(out, e.state.Config.PkgName)
 	writeImports(out, imps)
 
@@ -145,7 +138,6 @@ func executeSingletonTemplates(e executeTemplateData) error {
 			thirdParty: e.importNamedSet[fName].thirdParty,
 		}
 
-		writeFileDisclaimer(out)
 		writePackageName(out, e.state.Config.PkgName)
 		writeImports(out, imps)
 
@@ -170,10 +162,9 @@ func generateTestMainOutput(state *State, data *templateData) error {
 	out.Reset()
 
 	var imps imports
-	imps.standard = state.Importer.TestMain[state.Config.DriverName].standard
-	imps.thirdParty = state.Importer.TestMain[state.Config.DriverName].thirdParty
+	imps.standard = defaultTestMainImports[state.Config.DriverName].standard
+	imps.thirdParty = defaultTestMainImports[state.Config.DriverName].thirdParty
 
-	writeFileDisclaimer(out)
 	writePackageName(out, state.Config.PkgName)
 	writeImports(out, imps)
 
@@ -188,12 +179,6 @@ func generateTestMainOutput(state *State, data *templateData) error {
 	return nil
 }
 
-// writeFileDisclaimer writes the disclaimer at the top with a trailing
-// newline so the package name doesn't get attached to it.
-func writeFileDisclaimer(out *bytes.Buffer) {
-	_, _ = out.Write(noEditDisclaimer)
-}
-
 // writePackageName writes the package name correctly, ignores errors
 // since it's to the concrete buffer type which produces none
 func writePackageName(out *bytes.Buffer, pkgName string) {
diff --git a/boilingcore/output_test.go b/output_test.go
similarity index 98%
rename from boilingcore/output_test.go
rename to output_test.go
index fa954e3..3a33eca 100644
--- a/boilingcore/output_test.go
+++ b/output_test.go
@@ -1,4 +1,4 @@
-package boilingcore
+package main
 
 import (
 	"bytes"
diff --git a/queries/eager_load.go b/queries/eager_load.go
index 8992290..18abcc8 100644
--- a/queries/eager_load.go
+++ b/queries/eager_load.go
@@ -5,9 +5,9 @@ import (
 	"reflect"
 	"strings"
 
-	"github.com/lbryio/sqlboiler/boil"
-	"github.com/lbryio/sqlboiler/strmangle"
 	"github.com/pkg/errors"
+	"github.com/vattle/sqlboiler/boil"
+	"github.com/vattle/sqlboiler/strmangle"
 )
 
 type loadRelationshipState struct {
@@ -206,16 +206,9 @@ func (l loadRelationshipState) loadRelationshipsRecurse(depth int, obj reflect.V
 	}
 
 	bkind := kindStruct
-	if derefed := reflect.Indirect(loadedObject); derefed.Kind() != reflect.Struct {
+	if reflect.Indirect(loadedObject).Kind() != reflect.Struct {
 		bkind = kindPtrSliceStruct
-
-		// Convert away any helper slice types
-		// elemType is *elem (from []*elem or helperSliceType)
-		// sliceType is *[]*elem
-		elemType := derefed.Type().Elem()
-		sliceType := reflect.PtrTo(reflect.SliceOf(elemType))
-
-		loadedObject = loadedObject.Addr().Convert(sliceType)
+		loadedObject = loadedObject.Addr()
 	}
 	return l.loadRelationships(depth+1, loadedObject.Interface(), bkind)
 }
@@ -248,9 +241,6 @@ func collectLoaded(key string, loadingFrom reflect.Value) (reflect.Value, bindKi
 	if loadedType.Elem().Kind() == reflect.Struct {
 		bkind = kindStruct
 		loadedType = reflect.SliceOf(loadedType)
-	} else {
-		// Ensure that we get rid of all the helper "XSlice" types
-		loadedType = reflect.SliceOf(loadedType.Elem())
 	}
 
 	collection := reflect.MakeSlice(loadedType, 0, 0)
@@ -259,13 +249,9 @@ func collectLoaded(key string, loadingFrom reflect.Value) (reflect.Value, bindKi
 	for {
 		switch bkind {
 		case kindStruct:
-			if !loadedObject.IsNil() {
-				collection = reflect.Append(collection, loadedObject)
-			}
+			collection = reflect.Append(collection, loadedObject)
 		case kindPtrSliceStruct:
-			if !loadedObject.IsNil() {
-				collection = reflect.AppendSlice(collection, loadedObject)
-			}
+			collection = reflect.AppendSlice(collection, loadedObject)
 		}
 
 		i++
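
The removed block in loadRelationshipsRecurse converted generated helper slice types (e.g. a PilotSlice) down to their underlying *[]*T before recursing; the simplified version just takes Addr() of whatever it was given. A small standalone sketch of what that Convert step did, using a hypothetical pilotSlice type:

package main

import (
	"fmt"
	"reflect"
)

type pilot struct{ ID int }

// pilotSlice stands in for a generated helper type such as models.PilotSlice.
type pilotSlice []*pilot

func main() {
	loaded := pilotSlice{{ID: 1}, {ID: 2}}
	v := reflect.Indirect(reflect.ValueOf(&loaded)) // kind Slice, type pilotSlice

	// The removed conversion: *pilotSlice -> *[]*pilot.
	elemType := v.Type().Elem()                           // *pilot
	sliceType := reflect.PtrTo(reflect.SliceOf(elemType)) // *[]*pilot
	fmt.Println(v.Addr().Convert(sliceType).Type())       // *[]*main.pilot

	// The simplified code now passes the named type through unchanged.
	fmt.Println(v.Addr().Type()) // *main.pilotSlice
}
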
diff --git a/queries/eager_load_test.go b/queries/eager_load_test.go
index c7b3cef..dc3f5f1 100644
--- a/queries/eager_load_test.go
+++ b/queries/eager_load_test.go
@@ -4,7 +4,7 @@ import (
 	"fmt"
 	"testing"
 
-	"github.com/lbryio/sqlboiler/boil"
+	"github.com/vattle/sqlboiler/boil"
 )
 
 var testEagerCounters struct {
diff --git a/queries/helpers.go b/queries/helpers.go
index b953a67..59ad8a3 100644
--- a/queries/helpers.go
+++ b/queries/helpers.go
@@ -4,7 +4,7 @@ import (
 	"fmt"
 	"reflect"
 
-	"github.com/lbryio/sqlboiler/strmangle"
+	"github.com/vattle/sqlboiler/strmangle"
 )
 
 // NonZeroDefaultSet returns the fields included in the
diff --git a/queries/helpers_test.go b/queries/helpers_test.go
index fd093e3..d37fcd9 100644
--- a/queries/helpers_test.go
+++ b/queries/helpers_test.go
@@ -5,7 +5,7 @@ import (
 	"testing"
 	"time"
 
-	null "github.com/lbryio/lbry.go/v2/extras/null"
+	null "gopkg.in/nullbio/null.v6"
 )
 
 type testObj struct {
diff --git a/queries/qm/query_mods.go b/queries/qm/query_mods.go
index 42e881e..81ae3a0 100644
--- a/queries/qm/query_mods.go
+++ b/queries/qm/query_mods.go
@@ -1,6 +1,6 @@
 package qm
 
-import "github.com/lbryio/sqlboiler/queries"
+import "github.com/vattle/sqlboiler/queries"
 
 // QueryMod to modify the query object
 type QueryMod func(q *queries.Query)
@@ -8,9 +8,7 @@ type QueryMod func(q *queries.Query)
 // Apply the query mods to the Query object
 func Apply(q *queries.Query, mods ...QueryMod) {
 	for _, mod := range mods {
-		if mod != nil {
-			mod(q)
-		}
+		mod(q)
 	}
 }
 
@@ -125,12 +123,6 @@ func From(from string) QueryMod {
 	}
 }
 
-func ForceIndex(index string) QueryMod {
-	return func(q *queries.Query) {
-		queries.SetForceIndex(q, index)
-	}
-}
-
 // Limit the number of returned rows
 func Limit(limit int) QueryMod {
 	return func(q *queries.Query) {
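
qm.Apply no longer skips nil entries, so a nil QueryMod in the list now panics when invoked. A short usage sketch, assuming the restored vattle import paths, that builds mods conditionally instead of ever appending nil:

package main

import (
	"github.com/vattle/sqlboiler/queries"
	"github.com/vattle/sqlboiler/queries/qm"
)

// buildMods appends mods conditionally so the slice never contains nil.
func buildMods(limit int, activeOnly bool) []qm.QueryMod {
	mods := []qm.QueryMod{qm.Limit(limit)}
	if activeOnly {
		mods = append(mods, qm.Where("active = ?", true))
	}
	return mods
}

func main() {
	q := &queries.Query{}
	qm.Apply(q, buildMods(10, true)...)
}
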
diff --git a/queries/query.go b/queries/query.go
index e3a6470..5c2f3c9 100644
--- a/queries/query.go
+++ b/queries/query.go
@@ -4,8 +4,7 @@ import (
 	"database/sql"
 	"fmt"
 
-	"github.com/lbryio/lbry.go/v2/extras/errors"
-	"github.com/lbryio/sqlboiler/boil"
+	"github.com/vattle/sqlboiler/boil"
 )
 
 // joinKind is the type of join
@@ -30,7 +29,6 @@ type Query struct {
 	selectCols []string
 	count      bool
 	from       []string
-	forceindex string
 	joins      []join
 	where      []where
 	in         []in
@@ -54,9 +52,6 @@ type Dialect struct {
 	// Bool flag indicating whether indexed
 	// placeholders ($1) are used, or ? placeholders.
 	IndexPlaceholders bool
-	// Bool flag indicating whether "TOP" or "LIMIT" clause
-	// must be used for rows limitation
-	UseTopClause bool
 }
 
 type where struct {
@@ -138,7 +133,7 @@ func (q *Query) Query() (*sql.Rows, error) {
 func (q *Query) ExecP() sql.Result {
 	res, err := q.Exec()
 	if err != nil {
-		panic(errors.Err(err))
+		panic(boil.WrapErr(err))
 	}
 
 	return res
@@ -149,7 +144,7 @@ func (q *Query) ExecP() sql.Result {
 func (q *Query) QueryP() *sql.Rows {
 	rows, err := q.Query()
 	if err != nil {
-		panic(errors.Err(err))
+		panic(boil.WrapErr(err))
 	}
 
 	return rows
@@ -264,11 +259,6 @@ func SetLastWhereAsOr(q *Query) {
 	q.where[len(q.where)-1].orSeparator = true
 }
 
-// SetForceIndex sets the index to be used by the query
-func SetForceIndex(q *Query, index string){
-	q.forceindex = index
-}
-
 // SetLastInAsOr sets the or separator for the tail "IN" in the slice
 func SetLastInAsOr(q *Query) {
 	if len(q.in) == 0 {
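
With UseTopClause gone, queries.Dialect is down to the quote characters and the placeholder style. A hedged sketch of the two remaining shapes, assuming LQ/RQ hold single quote characters (the string(dia.LQ) conversions later in this patch suggest byte/rune fields):

package main

import (
	"fmt"

	"github.com/vattle/sqlboiler/queries"
)

func main() {
	// Postgres-style: double-quoted identifiers, indexed $N placeholders.
	pg := queries.Dialect{LQ: '"', RQ: '"', IndexPlaceholders: true}
	// MySQL-style: backtick identifiers, ? placeholders.
	my := queries.Dialect{LQ: '`', RQ: '`', IndexPlaceholders: false}
	fmt.Printf("%+v\n%+v\n", pg, my)
}
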
diff --git a/queries/query_builders.go b/queries/query_builders.go
index e682877..081ca58 100644
--- a/queries/query_builders.go
+++ b/queries/query_builders.go
@@ -7,7 +7,7 @@ import (
 	"sort"
 	"strings"
 
-	"github.com/lbryio/sqlboiler/strmangle"
+	"github.com/vattle/sqlboiler/strmangle"
 )
 
 var (
@@ -46,12 +46,6 @@ func buildSelectQuery(q *Query) (*bytes.Buffer, []interface{}) {
 
 	buf.WriteString("SELECT ")
 
-	if q.dialect.UseTopClause {
-		if q.limit != 0 && q.offset == 0 {
-			fmt.Fprintf(buf, " TOP (%d) ", q.limit)
-		}
-	}
-
 	if q.count {
 		buf.WriteString("COUNT(")
 	}
@@ -76,13 +70,7 @@ func buildSelectQuery(q *Query) (*bytes.Buffer, []interface{}) {
 		buf.WriteByte(')')
 	}
 
-	if len(q.forceindex) > 0 {
-		fmt.Fprintf(buf, " FROM %s FORCE INDEX (%s)", strings.Join(strmangle.IdentQuoteSlice(q.dialect.LQ, q.dialect.RQ, q.from), ", "),q.forceindex)
-
-	}else{
-		fmt.Fprintf(buf, " FROM %s", strings.Join(strmangle.IdentQuoteSlice(q.dialect.LQ, q.dialect.RQ, q.from), ", "))
-
-	}
+	fmt.Fprintf(buf, " FROM %s", strings.Join(strmangle.IdentQuoteSlice(q.dialect.LQ, q.dialect.RQ, q.from), ", "))
 
 	if len(q.joins) > 0 {
 		argsLen := len(args)
@@ -196,23 +184,18 @@ func buildUpdateQuery(q *Query) (*bytes.Buffer, []interface{}) {
 }
 
 // BuildUpsertQueryMySQL builds a SQL statement string using the upsertData provided.
-func BuildUpsertQueryMySQL(dia Dialect, tableName string, update, whitelist []string, autoIncrementCol string) string {
+func BuildUpsertQueryMySQL(dia Dialect, tableName string, update, whitelist []string) string {
 	whitelist = strmangle.IdentQuoteSlice(dia.LQ, dia.RQ, whitelist)
 
 	buf := strmangle.GetBuffer()
 	defer strmangle.PutBuffer(buf)
 
-	var columns string
-	if len(whitelist) != 0 {
-		columns = strings.Join(whitelist, ", ")
-	}
-
 	if len(update) == 0 {
 		fmt.Fprintf(
 			buf,
 			"INSERT IGNORE INTO %s (%s) VALUES (%s)",
 			tableName,
-			columns,
+			strings.Join(whitelist, ", "),
 			strmangle.Placeholders(dia.IndexPlaceholders, len(whitelist), 1, 1),
 		)
 		return buf.String()
@@ -222,15 +205,10 @@ func BuildUpsertQueryMySQL(dia Dialect, tableName string, update, whitelist []st
 		buf,
 		"INSERT INTO %s (%s) VALUES (%s) ON DUPLICATE KEY UPDATE ",
 		tableName,
-		columns,
+		strings.Join(whitelist, ", "),
 		strmangle.Placeholders(dia.IndexPlaceholders, len(whitelist), 1, 1),
 	)
 
-	// https://stackoverflow.com/questions/778534/mysql-on-duplicate-key-last-insert-id
-	if autoIncrementCol != "" {
-		buf.WriteString(autoIncrementCol + " = LAST_INSERT_ID(" + autoIncrementCol + "), ")
-	}
-
 	for i, v := range update {
 		if i != 0 {
 			buf.WriteByte(',')
@@ -254,18 +232,12 @@ func BuildUpsertQueryPostgres(dia Dialect, tableName string, updateOnConflict bo
 	buf := strmangle.GetBuffer()
 	defer strmangle.PutBuffer(buf)
 
-	columns := "DEFAULT VALUES"
-	if len(whitelist) != 0 {
-		columns = fmt.Sprintf("(%s) VALUES (%s)",
-			strings.Join(whitelist, ", "),
-			strmangle.Placeholders(dia.IndexPlaceholders, len(whitelist), 1, 1))
-	}
-
 	fmt.Fprintf(
 		buf,
-		"INSERT INTO %s %s ON CONFLICT ",
+		"INSERT INTO %s (%s) VALUES (%s) ON CONFLICT ",
 		tableName,
-		columns,
+		strings.Join(whitelist, ", "),
+		strmangle.Placeholders(dia.IndexPlaceholders, len(whitelist), 1, 1),
 	)
 
 	if !updateOnConflict || len(update) == 0 {
@@ -294,49 +266,6 @@ func BuildUpsertQueryPostgres(dia Dialect, tableName string, updateOnConflict bo
 	return buf.String()
 }
 
-// BuildUpsertQueryMSSQL builds a SQL statement string using the upsertData provided.
-func BuildUpsertQueryMSSQL(dia Dialect, tableName string, primary, update, insert []string, output []string) string {
-	insert = strmangle.IdentQuoteSlice(dia.LQ, dia.RQ, insert)
-
-	buf := strmangle.GetBuffer()
-	defer strmangle.PutBuffer(buf)
-
-	startIndex := 1
-
-	fmt.Fprintf(buf, "MERGE INTO %s as [t]\n", tableName)
-	fmt.Fprintf(buf, "USING (SELECT %s) as [s] ([%s])\n",
-		strmangle.Placeholders(dia.IndexPlaceholders, len(primary), startIndex, 1),
-		strings.Join(primary, string(dia.RQ)+","+string(dia.LQ)))
-	fmt.Fprint(buf, "ON (")
-	for i, v := range primary {
-		if i != 0 {
-			fmt.Fprint(buf, " AND ")
-		}
-		fmt.Fprintf(buf, "[s].[%s] = [t].[%s]", v, v)
-	}
-	fmt.Fprint(buf, ")\n")
-
-	startIndex += len(primary)
-
-	fmt.Fprint(buf, "WHEN MATCHED THEN ")
-	fmt.Fprintf(buf, "UPDATE SET %s\n", strmangle.SetParamNames(string(dia.LQ), string(dia.RQ), startIndex, update))
-
-	startIndex += len(update)
-
-	fmt.Fprint(buf, "WHEN NOT MATCHED THEN ")
-	fmt.Fprintf(buf, "INSERT (%s) VALUES (%s)",
-		strings.Join(insert, ", "),
-		strmangle.Placeholders(dia.IndexPlaceholders, len(insert), startIndex, 1))
-
-	if len(output) > 0 {
-		fmt.Fprintf(buf, "\nOUTPUT INSERTED.[%s];", strings.Join(output, "],INSERTED.["))
-	} else {
-		fmt.Fprint(buf, ";")
-	}
-
-	return buf.String()
-}
-
 func writeModifiers(q *Query, buf *bytes.Buffer, args *[]interface{}) {
 	if len(q.groupBy) != 0 {
 		fmt.Fprintf(buf, " GROUP BY %s", strings.Join(q.groupBy, ", "))
@@ -368,36 +297,11 @@ func writeModifiers(q *Query, buf *bytes.Buffer, args *[]interface{}) {
 		buf.WriteString(strings.Join(q.orderBy, ", "))
 	}
 
-	if !q.dialect.UseTopClause {
-		if q.limit != 0 {
-			fmt.Fprintf(buf, " LIMIT %d", q.limit)
-		}
-
-		if q.offset != 0 {
-			fmt.Fprintf(buf, " OFFSET %d", q.offset)
-		}
-	} else {
-		// From MS SQL 2012 and above: https://technet.microsoft.com/en-us/library/ms188385(v=sql.110).aspx
-		// ORDER BY ...
-		// OFFSET N ROWS
-		// FETCH NEXT M ROWS ONLY
-		if q.offset != 0 {
-
-			// Hack from https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
-			// ...
-			// As mentioned, the OFFSET-FETCH filter requires an ORDER BY clause. If you want to use arbitrary order,
-			// like TOP without an ORDER BY clause, you can use the trick with ORDER BY (SELECT NULL)
-			// ...
-			if len(q.orderBy) == 0 {
-				buf.WriteString(" ORDER BY (SELECT NULL)")
-			}
-
-			fmt.Fprintf(buf, " OFFSET %d", q.offset)
-
-			if q.limit != 0 {
-				fmt.Fprintf(buf, " FETCH NEXT %d ROWS ONLY", q.limit)
-			}
-		}
+	if q.limit != 0 {
+		fmt.Fprintf(buf, " LIMIT %d", q.limit)
+	}
+	if q.offset != 0 {
+		fmt.Fprintf(buf, " OFFSET %d", q.offset)
 	}
 
 	if len(q.forlock) != 0 {
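
With the TOP / OFFSET-FETCH branch removed, row limiting is always rendered as LIMIT/OFFSET regardless of driver. A minimal standalone sketch of the surviving code path and the SQL it appends:

package main

import (
	"bytes"
	"fmt"
)

// writeLimitOffset mirrors the simplified modifier logic: zero values are
// simply omitted, everything else becomes plain LIMIT/OFFSET.
func writeLimitOffset(buf *bytes.Buffer, limit, offset int) {
	if limit != 0 {
		fmt.Fprintf(buf, " LIMIT %d", limit)
	}
	if offset != 0 {
		fmt.Fprintf(buf, " OFFSET %d", offset)
	}
}

func main() {
	buf := bytes.NewBufferString("SELECT * FROM pilots ORDER BY id")
	writeLimitOffset(buf, 25, 50)
	fmt.Println(buf.String()) // SELECT * FROM pilots ORDER BY id LIMIT 25 OFFSET 50
}
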
diff --git a/queries/reflect.go b/queries/reflect.go
index 39eda23..9c55b33 100644
--- a/queries/reflect.go
+++ b/queries/reflect.go
@@ -7,9 +7,9 @@ import (
 	"strings"
 	"sync"
 
-	"github.com/lbryio/sqlboiler/strmangle"
-
 	"github.com/pkg/errors"
+	"github.com/vattle/sqlboiler/boil"
+	"github.com/vattle/sqlboiler/strmangle"
 )
 
 var (
@@ -41,7 +41,7 @@ const (
 // It panics on error. See boil.Bind() documentation.
 func (q *Query) BindP(obj interface{}) {
 	if err := q.Bind(obj); err != nil {
-		panic(errors.WithStack(err))
+		panic(boil.WrapErr(err))
 	}
 }
 
diff --git a/randomize/randomize.go b/randomize/randomize.go
index 30592bf..df5d75e 100644
--- a/randomize/randomize.go
+++ b/randomize/randomize.go
@@ -4,7 +4,6 @@ package randomize
 import (
 	"database/sql"
 	"fmt"
-	"math"
 	"math/rand"
 	"reflect"
 	"regexp"
@@ -14,12 +13,12 @@ import (
 	"sync/atomic"
 	"time"
 
-	null "github.com/lbryio/lbry.go/v2/extras/null"
+	null "gopkg.in/nullbio/null.v6"
 
 	"github.com/pkg/errors"
 	"github.com/satori/go.uuid"
-	"github.com/lbryio/sqlboiler/strmangle"
-	"github.com/lbryio/sqlboiler/types"
+	"github.com/vattle/sqlboiler/strmangle"
+	"github.com/vattle/sqlboiler/types"
 )
 
 var (
@@ -72,7 +71,7 @@ func NewSeed() *Seed {
 }
 
 func (s *Seed) nextInt() int {
-	return int(atomic.AddInt64((*int64)(s), 1) % math.MaxInt32)
+	return int(atomic.AddInt64((*int64)(s), 1))
 }
 
 // Struct gets its fields filled with random data based on the seed.
@@ -502,23 +501,23 @@ func getStructRandValue(s *Seed, typ reflect.Type) interface{} {
 	case typeNullFloat64:
 		return null.NewFloat64(float64(s.nextInt()%10)/10.0+float64(s.nextInt()%10), true)
 	case typeNullInt:
-		return null.NewInt(int(int32(s.nextInt()%math.MaxInt32)), true)
+		return null.NewInt(int(int32(s.nextInt())), true)
 	case typeNullInt8:
-		return null.NewInt8(int8(s.nextInt()%math.MaxInt8), true)
+		return null.NewInt8(int8(s.nextInt()), true)
 	case typeNullInt16:
-		return null.NewInt16(int16(s.nextInt()%math.MaxInt16), true)
+		return null.NewInt16(int16(s.nextInt()), true)
 	case typeNullInt32:
-		return null.NewInt32(int32(s.nextInt()%math.MaxInt32), true)
+		return null.NewInt32(int32(s.nextInt()), true)
 	case typeNullInt64:
 		return null.NewInt64(int64(s.nextInt()), true)
 	case typeNullUint:
 		return null.NewUint(uint(s.nextInt()), true)
 	case typeNullUint8:
-		return null.NewUint8(uint8(s.nextInt()%math.MaxUint8), true)
+		return null.NewUint8(uint8(s.nextInt()), true)
 	case typeNullUint16:
-		return null.NewUint16(uint16(s.nextInt()%math.MaxUint16), true)
+		return null.NewUint16(uint16(s.nextInt()), true)
 	case typeNullUint32:
-		return null.NewUint32(uint32(s.nextInt()%math.MaxUint32), true)
+		return null.NewUint32(uint32(s.nextInt()), true)
 	case typeNullUint64:
 		return null.NewUint64(uint64(s.nextInt()), true)
 	case typeNullBytes:
@@ -591,21 +590,21 @@ func getVariableRandValue(s *Seed, kind reflect.Kind, typ reflect.Type) interfac
 	case reflect.Int:
 		return s.nextInt()
 	case reflect.Int8:
-		return int8(s.nextInt() % math.MaxInt8)
+		return int8(s.nextInt())
 	case reflect.Int16:
-		return int16(s.nextInt() % math.MaxInt16)
+		return int16(s.nextInt())
 	case reflect.Int32:
-		return int32(s.nextInt() % math.MaxInt32)
+		return int32(s.nextInt())
 	case reflect.Int64:
 		return int64(s.nextInt())
 	case reflect.Uint:
 		return uint(s.nextInt())
 	case reflect.Uint8:
-		return uint8(s.nextInt() % math.MaxUint8)
+		return uint8(s.nextInt())
 	case reflect.Uint16:
-		return uint16(s.nextInt() % math.MaxUint16)
+		return uint16(s.nextInt())
 	case reflect.Uint32:
-		return uint32(s.nextInt() % math.MaxUint32)
+		return uint32(s.nextInt())
 	case reflect.Uint64:
 		return uint64(s.nextInt())
 	case reflect.Bool:
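
Dropping the math.MaxX modulo guards means the narrowing conversions now rely on Go's wrap-around truncation, so e.g. an int8 random value can come out negative. A tiny sketch of the difference:

package main

import (
	"fmt"
	"math"
)

func main() {
	n := 1000 // stand-in for the seed counter
	fmt.Println(int8(n))                // plain narrowing: truncates to -24
	fmt.Println(int8(n % math.MaxInt8)) // the removed clamp: stays in range at 111
}
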
diff --git a/randomize/randomize_test.go b/randomize/randomize_test.go
index 71afb55..6f117b7 100644
--- a/randomize/randomize_test.go
+++ b/randomize/randomize_test.go
@@ -5,7 +5,7 @@ import (
 	"testing"
 	"time"
 
-	null "github.com/lbryio/lbry.go/v2/extras/null"
+	null "gopkg.in/nullbio/null.v6"
 )
 
 func TestRandomizeStruct(t *testing.T) {
diff --git a/boilingcore/boilingcore.go b/sqlboiler.go
similarity index 75%
rename from boilingcore/boilingcore.go
rename to sqlboiler.go
index e667cae..0c52dce 100644
--- a/boilingcore/boilingcore.go
+++ b/sqlboiler.go
@@ -1,6 +1,6 @@
-// Package boilingcore has types and methods useful for generating code that
+// Package sqlboiler has types and methods useful for generating code that
 // acts as a fully dynamic ORM might.
-package boilingcore
+package main
 
 import (
 	"encoding/json"
@@ -13,10 +13,10 @@ import (
 	"text/template"
 
 	"github.com/pkg/errors"
-	"github.com/lbryio/sqlboiler/bdb"
-	"github.com/lbryio/sqlboiler/bdb/drivers"
-	"github.com/lbryio/sqlboiler/queries"
-	"github.com/lbryio/sqlboiler/strmangle"
+	"github.com/vattle/sqlboiler/bdb"
+	"github.com/vattle/sqlboiler/bdb/drivers"
+	"github.com/vattle/sqlboiler/queries"
+	"github.com/vattle/sqlboiler/strmangle"
 )
 
 const (
@@ -43,8 +43,6 @@ type State struct {
 	SingletonTestTemplates *templateList
 
 	TestMainTemplate *template.Template
-
-	Importer importer
 }
 
 // New creates a new state based off of the config
@@ -91,8 +89,6 @@ func New(config *Config) (*State, error) {
 		return nil, errors.Wrap(err, "unable to initialize struct tags")
 	}
 
-	s.Importer = newImporter()
-
 	return s, nil
 }
 
@@ -208,66 +204,10 @@ func (s *State) initTemplates() error {
 		}
 	}
 
-	return s.processReplacements()
-}
-
-// processReplacements loads any replacement templates
-func (s *State) processReplacements() error {
-	basePath, err := getBasePath(s.Config.BaseDir)
-	if err != nil {
-		return err
-	}
-
-	for _, replace := range s.Config.Replacements {
-		splits := strings.Split(replace, ":")
-		if len(splits) != 2 {
-			return errors.Errorf("replace parameters must have 2 arguments, given: %s", replace)
-		}
-
-		var toReplaceFname string
-		toReplace, replaceWith := splits[0], splits[1]
-
-		inf, err := os.Stat(filepath.Join(basePath, toReplace))
-		if err != nil {
-			return errors.Errorf("cannot stat %q", toReplace)
-		}
-		if inf.IsDir() {
-			return errors.Errorf("replace argument must be a path to a file not a dir: %q", toReplace)
-		}
-		toReplaceFname = inf.Name()
-
-		inf, err = os.Stat(replaceWith)
-		if err != nil {
-			return errors.Errorf("cannot stat %q", replaceWith)
-		}
-		if inf.IsDir() {
-			return errors.Errorf("replace argument must be a path to a file not a dir: %q", replaceWith)
-		}
-
-		switch filepath.Dir(toReplace) {
-		case templatesDirectory:
-			err = replaceTemplate(s.Templates.Template, toReplaceFname, replaceWith)
-		case templatesSingletonDirectory:
-			err = replaceTemplate(s.SingletonTemplates.Template, toReplaceFname, replaceWith)
-		case templatesTestDirectory:
-			err = replaceTemplate(s.TestTemplates.Template, toReplaceFname, replaceWith)
-		case templatesSingletonTestDirectory:
-			err = replaceTemplate(s.SingletonTestTemplates.Template, toReplaceFname, replaceWith)
-		case templatesTestMainDirectory:
-			err = replaceTemplate(s.TestMainTemplate, toReplaceFname, replaceWith)
-		default:
-			return errors.Errorf("replace file's directory not part of any known folder: %s", toReplace)
-		}
-
-		if err != nil {
-			return err
-		}
-	}
-
 	return nil
 }
 
-var basePackage = "github.com/lbryio/sqlboiler"
+var basePackage = "github.com/vattle/sqlboiler"
 
 func getBasePath(baseDirConfig string) (string, error) {
 	if len(baseDirConfig) > 0 {
@@ -305,15 +245,6 @@ func (s *State) initDriver(driverName string) error {
 			s.Config.MySQL.Port,
 			s.Config.MySQL.SSLMode,
 		)
-	case "mssql":
-		s.Driver = drivers.NewMSSQLDriver(
-			s.Config.MSSQL.User,
-			s.Config.MSSQL.Pass,
-			s.Config.MSSQL.DBName,
-			s.Config.MSSQL.Host,
-			s.Config.MSSQL.Port,
-			s.Config.MSSQL.SSLMode,
-		)
 	case "mock":
 		s.Driver = &drivers.MockDriver{}
 	}
@@ -325,7 +256,6 @@ func (s *State) initDriver(driverName string) error {
 	s.Dialect.LQ = s.Driver.LeftQuote()
 	s.Dialect.RQ = s.Driver.RightQuote()
 	s.Dialect.IndexPlaceholders = s.Driver.IndexPlaceholders()
-	s.Dialect.UseTopClause = s.Driver.UseTopClause()
 
 	return nil
 }
@@ -367,12 +297,6 @@ func (s *State) initTags(tags []string) error {
 
 // initOutFolder creates the folder that will hold the generated output.
 func (s *State) initOutFolder() error {
-	if s.Config.Wipe {
-		if err := os.RemoveAll(s.Config.OutFolder); err != nil {
-			return err
-		}
-	}
-
 	return os.MkdirAll(s.Config.OutFolder, os.ModePerm)
 }
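
initOutFolder no longer honors a Wipe option, so stale generated files are left in place between runs. A hedged sketch of what a caller who wants a clean slate would now do themselves; the "models" folder name is only illustrative.

package main

import (
	"log"
	"os"
)

func main() {
	outFolder := "models"

	// Previously gated behind Config.Wipe inside initOutFolder; now up to the caller.
	if err := os.RemoveAll(outFolder); err != nil {
		log.Fatal(err)
	}
	if err := os.MkdirAll(outFolder, os.ModePerm); err != nil {
		log.Fatal(err)
	}
}
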
 
diff --git a/boilingcore/boilingcore_test.go b/sqlboiler_test.go
similarity index 99%
rename from boilingcore/boilingcore_test.go
rename to sqlboiler_test.go
index f9e7182..367e429 100644
--- a/boilingcore/boilingcore_test.go
+++ b/sqlboiler_test.go
@@ -1,4 +1,4 @@
-package boilingcore
+package main
 
 import (
 	"bufio"
diff --git a/strmangle/inflect.go b/strmangle/inflect.go
index 6238c39..78945b6 100644
--- a/strmangle/inflect.go
+++ b/strmangle/inflect.go
@@ -92,10 +92,6 @@ func newBoilRuleset() *inflect.Ruleset {
 	rs.AddPluralExact("oxen", "oxen", true)
 	rs.AddPluralExact("quiz", "quizzes", true)
 	rs.AddSingular("s", "")
-	rs.AddSingular("ss", "ss")
-	rs.AddSingular("as", "as")
-	rs.AddSingular("us", "us")
-	rs.AddSingular("is", "is")
 	rs.AddSingular("news", "news")
 	rs.AddSingular("ta", "tum")
 	rs.AddSingular("ia", "ium")
@@ -188,15 +184,5 @@ func newBoilRuleset() *inflect.Ruleset {
 	rs.AddIrregular("move", "moves")
 	rs.AddIrregular("zombie", "zombies")
 	rs.AddIrregular("cookie", "cookies")
-	rs.AddSingularExact("a", "a", true)
-	rs.AddSingularExact("i", "i", true)
-	rs.AddSingularExact("is", "is", true)
-	rs.AddSingularExact("us", "us", true)
-	rs.AddSingularExact("as", "as", true)
-	rs.AddPluralExact("a", "a", true)
-	rs.AddPluralExact("i", "i", true)
-	rs.AddPluralExact("is", "is", true)
-	rs.AddPluralExact("us", "us", true)
-	rs.AddPluralExact("as", "as", true)
 	return rs
 }
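
Removing the exact "a"/"i"/"is"/"us"/"as" entries leaves names with those endings to the generic rules, which can change how table names singularize. A small probe, assuming strmangle.Singular is the public entry point the templates use, to inspect the resulting behavior rather than assert specific outputs:

package main

import (
	"fmt"

	"github.com/vattle/sqlboiler/strmangle"
)

func main() {
	// Words ending in "s"-like suffixes are the ones affected by the rule change.
	for _, w := range []string{"pilots", "status", "bus", "analyses"} {
		fmt.Printf("%s -> %s\n", w, strmangle.Singular(w))
	}
}
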
diff --git a/strmangle/strmangle.go b/strmangle/strmangle.go
index b330ac0..ab93d26 100644
--- a/strmangle/strmangle.go
+++ b/strmangle/strmangle.go
@@ -43,34 +43,6 @@ var uppercaseWords = map[string]struct{}{
 	"utf8":  {},
 }
 
-var reservedWords = map[string]struct{}{
-	"break":       {},
-	"case":        {},
-	"chan":        {},
-	"const":       {},
-	"continue":    {},
-	"default":     {},
-	"defer":       {},
-	"else":        {},
-	"fallthrough": {},
-	"for":         {},
-	"func":        {},
-	"go":          {},
-	"goto":        {},
-	"if":          {},
-	"import":      {},
-	"interface":   {},
-	"map":         {},
-	"package":     {},
-	"range":       {},
-	"return":      {},
-	"select":      {},
-	"struct":      {},
-	"switch":      {},
-	"type":        {},
-	"var":         {},
-}
-
 func init() {
 	// Our Boil inflection Ruleset does not include uncountable inflections.
 	// This way, people using words like Sheep will not have
@@ -82,11 +54,10 @@ func init() {
 
 // SchemaTable returns a table name with a schema prefixed if
 // using a database that supports real schemas, for example,
-// for Postgres: "schema_name"."table_name",
-// for MS SQL: [schema_name].[table_name], versus
+// for Postgres: "schema_name"."table_name", versus
 // simply "table_name" for MySQL (because it does not support real schemas)
 func SchemaTable(lq, rq string, driver string, schema string, table string) string {
-	if (driver == "postgres" && schema != "public") || driver == "mssql" {
+	if driver == "postgres" && schema != "public" {
 		return fmt.Sprintf(`%s%s%s.%s%s%s`, lq, schema, rq, lq, table, rq)
 	}
 
@@ -521,30 +492,6 @@ func WhereClause(lq, rq string, start int, cols []string) string {
 	return buf.String()
 }
 
-// WhereClauseRepeated returns the where clause repeated with OR clause using start as the $ flag index
-// For example, if start was 2 output would be: "(colthing=$2 AND colstuff=$3) OR (colthing=$4 AND colstuff=$5)"
-func WhereClauseRepeated(lq, rq string, start int, cols []string, count int) string {
-	var startIndex int
-	buf := GetBuffer()
-	defer PutBuffer(buf)
-	buf.WriteByte('(')
-	for i := 0; i < count; i++ {
-		if i != 0 {
-			buf.WriteString(") OR (")
-		}
-
-		startIndex = 0
-		if start > 0 {
-			startIndex = start + i*len(cols)
-		}
-
-		buf.WriteString(WhereClause(lq, rq, startIndex, cols))
-	}
-	buf.WriteByte(')')
-
-	return buf.String()
-}
-
 // JoinSlices merges two string slices of equal length
 func JoinSlices(sep string, a, b []string) []string {
 	lna, lnb := len(a), len(b)
@@ -683,12 +630,3 @@ func IsEnumNormal(values []string) bool {
 func ShouldTitleCaseEnum(value string) bool {
 	return rgxEnumShouldTitle.MatchString(value)
 }
-
-// ReplaceReservedWords takes a word and replaces it with word_ if it's found
-// in the list of reserved words.
-func ReplaceReservedWords(word string) string {
-	if _, ok := reservedWords[word]; ok {
-		return word + "_"
-	}
-	return word
-}
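
After this hunk SchemaTable only schema-qualifies Postgres tables outside the public schema. A short usage sketch; the Postgres output follows the format string shown above, while the unqualified cases assume the fallback simply quotes the bare table name as the doc comment describes.

package main

import (
	"fmt"

	"github.com/vattle/sqlboiler/strmangle"
)

func main() {
	fmt.Println(strmangle.SchemaTable(`"`, `"`, "postgres", "audit", "events"))
	// "audit"."events"

	fmt.Println(strmangle.SchemaTable(`"`, `"`, "postgres", "public", "events"))
	fmt.Println(strmangle.SchemaTable("`", "`", "mysql", "sqlboiler", "events"))
	// both fall through to the plain quoted table name
}
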
diff --git a/strmangle/strmangle_test.go b/strmangle/strmangle_test.go
index b07cee6..2a14af5 100644
--- a/strmangle/strmangle_test.go
+++ b/strmangle/strmangle_test.go
@@ -580,23 +580,3 @@ func TestShouldTitleCaseEnum(t *testing.T) {
 		}
 	}
 }
-
-func TestReplaceReservedWords(t *testing.T) {
-	tests := []struct {
-		Word    string
-		Replace bool
-	}{
-		{"break", true},
-		{"id", false},
-		{"type", true},
-	}
-
-	for i, test := range tests {
-		got := ReplaceReservedWords(test.Word)
-		if test.Replace && !strings.HasSuffix(got, "_") {
-			t.Errorf("%i) want suffixed (%s), got: %s", i, test.Word, got)
-		} else if !test.Replace && strings.HasSuffix(got, "_") {
-			t.Errorf("%i) want normal (%s), got: %s", i, test.Word, got)
-		}
-	}
-}
diff --git a/boilingcore/templates.go b/templates.go
similarity index 82%
rename from boilingcore/templates.go
rename to templates.go
index 86c78ce..9baae19 100644
--- a/boilingcore/templates.go
+++ b/templates.go
@@ -1,17 +1,15 @@
-package boilingcore
+package main
 
 import (
 	"fmt"
-	"io/ioutil"
 	"path/filepath"
 	"sort"
 	"strings"
 	"text/template"
 
-	"github.com/lbryio/sqlboiler/bdb"
-	"github.com/lbryio/sqlboiler/queries"
-	"github.com/lbryio/sqlboiler/strmangle"
-	"github.com/pkg/errors"
+	"github.com/vattle/sqlboiler/bdb"
+	"github.com/vattle/sqlboiler/queries"
+	"github.com/vattle/sqlboiler/strmangle"
 )
 
 // templateData for sqlboiler templates
@@ -111,7 +109,7 @@ func loadTemplates(dir string) (*templateList, error) {
 	return &templateList{Template: tpl}, err
 }
 
-// loadTemplate loads a single template file
+// loadTemplate loads a single template file.
 func loadTemplate(dir string, filename string) (*template.Template, error) {
 	pattern := filepath.Join(dir, filename)
 	tpl, err := template.New("").Funcs(templateFunctions).ParseFiles(pattern)
@@ -123,25 +121,6 @@ func loadTemplate(dir string, filename string) (*template.Template, error) {
 	return tpl.Lookup(filename), err
 }
 
-// replaceTemplate finds the template matching with name and replaces its
-// contents with the contents of the template located at filename
-func replaceTemplate(tpl *template.Template, name, filename string) error {
-	if tpl == nil {
-		return fmt.Errorf("template for %s is nil", name)
-	}
-
-	b, err := ioutil.ReadFile(filename)
-	if err != nil {
-		return errors.Wrapf(err, "failed reading template file: %s", filename)
-	}
-
-	if tpl, err = tpl.New(name).Funcs(templateFunctions).Parse(string(b)); err != nil {
-		return errors.Wrapf(err, "failed to parse template file: %s", filename)
-	}
-
-	return nil
-}
-
 // set is to stop duplication from named enums, allowing a template loop
 // to keep some state
 type once map[string]struct{}
@@ -168,8 +147,7 @@ func (o once) Put(s string) bool {
 // stringMap function.
 var templateStringMappers = map[string]func(string) string{
 	// String ops
-	"quoteWrap":       func(a string) string { return fmt.Sprintf(`"%s"`, a) },
-	"replaceReserved": strmangle.ReplaceReservedWords,
+	"quoteWrap": func(a string) string { return fmt.Sprintf(`"%s"`, a) },
 
 	// Casing
 	"titleCase": strmangle.TitleCase,
@@ -225,7 +203,6 @@ var templateFunctions = template.FuncMap{
 	"txtsFromToMany":   txtsFromToMany,
 
 	// dbdrivers ops
-	"filterColumnsByAuto":    bdb.FilterColumnsByAuto,
 	"filterColumnsByDefault": bdb.FilterColumnsByDefault,
 	"filterColumnsByEnum":    bdb.FilterColumnsByEnum,
 	"sqlColDefinitions":      bdb.SQLColDefinitions,
diff --git a/templates/00_struct.tpl b/templates/00_struct.tpl
index a2608fa..585feee 100644
--- a/templates/00_struct.tpl
+++ b/templates/00_struct.tpl
@@ -17,23 +17,6 @@ type {{$modelName}} struct {
 	{{end -}}
 }
 
-var {{$modelName}}Columns = struct {
-	{{range $column := .Table.Columns -}}
-	{{titleCase $column.Name}} string
-	{{end -}}
-}{
-	{{range $column := .Table.Columns -}}
-	{{titleCase $column.Name}}: "{{$column.Name}}",
-	{{end -}}
-}
-
-// {{$modelName}}Filter allows you to filter on any columns by making them all pointers.
-type {{$modelName}}Filter struct {
-	{{range $column := .Table.Columns -}}
-	{{titleCase $column.Name}} *{{$column.Type}} `{{generateTags $dot.Tags $column.Name}}boil:"{{$column.Name}}" json:"{{$column.Name}},omitempty" toml:"{{$column.Name}}" yaml:"{{$column.Name}},omitempty"`
-	{{end -}}
-}
-
 {{- if .Table.IsJoinTable -}}
 {{- else}}
 // {{$modelNameCamel}}R is where relationships are stored.
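
For reference, the removed template block emitted a Columns value of column-name strings and a Filter struct of pointer fields per model. A hedged sketch of roughly what that expanded to for a hypothetical pilots table, to make clear which generated API disappears with it:

package main

import "fmt"

// Roughly what {{$modelName}}Columns expanded to.
var PilotColumns = struct {
	ID   string
	Name string
}{
	ID:   "id",
	Name: "name",
}

// Roughly what {{$modelName}}Filter expanded to: pointer fields so an unset
// filter is distinguishable from a zero value.
type PilotFilter struct {
	ID   *int    `boil:"id" json:"id,omitempty"`
	Name *string `boil:"name" json:"name,omitempty"`
}

func main() {
	name := "ace"
	f := PilotFilter{Name: &name}
	fmt.Println(PilotColumns.Name, f.Name != nil)
}
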
diff --git a/templates/01_types.tpl b/templates/01_types.tpl
index 5aac35f..edf742f 100644
--- a/templates/01_types.tpl
+++ b/templates/01_types.tpl
@@ -4,13 +4,9 @@
 {{- $tableNameSingular := .Table.Name | singular | titleCase -}}
 var (
 	{{$varNameSingular}}Columns               = []string{{"{"}}{{.Table.Columns | columnNames | stringMap .StringFuncs.quoteWrap | join ", "}}{{"}"}}
-	{{if eq .DriverName "mssql" -}}
-	{{$varNameSingular}}ColumnsWithAuto = []string{{"{"}}{{.Table.Columns | filterColumnsByAuto true | columnNames | stringMap .StringFuncs.quoteWrap | join ","}}{{"}"}}
-	{{end -}}
 	{{$varNameSingular}}ColumnsWithoutDefault = []string{{"{"}}{{.Table.Columns | filterColumnsByDefault false | columnNames | stringMap .StringFuncs.quoteWrap | join ","}}{{"}"}}
 	{{$varNameSingular}}ColumnsWithDefault    = []string{{"{"}}{{.Table.Columns | filterColumnsByDefault true | columnNames | stringMap .StringFuncs.quoteWrap | join ","}}{{"}"}}
 	{{$varNameSingular}}PrimaryKeyColumns     = []string{{"{"}}{{.Table.PKey.Columns | stringMap .StringFuncs.quoteWrap | join ", "}}{{"}"}}
-	{{$varNameSingular}}AutoIncrementColumn   = "{{.Table.AutoIncrementColumn }}"
 )
 
 type (
@@ -22,7 +18,7 @@ type (
 	{{$tableNameSingular}}Hook func(boil.Executor, *{{$tableNameSingular}}) error
 	{{- end}}
 
-	{{$tableNameSingular}}Query struct {
+	{{$varNameSingular}}Query struct {
 		*queries.Query
 	}
 )
diff --git a/templates/02_hooks.tpl b/templates/02_hooks.tpl
index d152978..9815639 100644
--- a/templates/02_hooks.tpl
+++ b/templates/02_hooks.tpl
@@ -16,7 +16,7 @@ var {{$varNameSingular}}AfterUpsertHooks []{{$tableNameSingular}}Hook
 func (o *{{$tableNameSingular}}) doBeforeInsertHooks(exec boil.Executor) (err error) {
 	for _, hook := range {{$varNameSingular}}BeforeInsertHooks {
 		if err := hook(exec, o); err != nil {
-			return errors.Err(err)
+			return err
 		}
 	}
 
@@ -27,7 +27,7 @@ func (o *{{$tableNameSingular}}) doBeforeInsertHooks(exec boil.Executor) (err er
 func (o *{{$tableNameSingular}}) doBeforeUpdateHooks(exec boil.Executor) (err error) {
 	for _, hook := range {{$varNameSingular}}BeforeUpdateHooks {
 		if err := hook(exec, o); err != nil {
-			return errors.Err(err)
+			return err
 		}
 	}
 
@@ -38,7 +38,7 @@ func (o *{{$tableNameSingular}}) doBeforeUpdateHooks(exec boil.Executor) (err er
 func (o *{{$tableNameSingular}}) doBeforeDeleteHooks(exec boil.Executor) (err error) {
 	for _, hook := range {{$varNameSingular}}BeforeDeleteHooks {
 		if err := hook(exec, o); err != nil {
-			return errors.Err(err)
+			return err
 		}
 	}
 
@@ -49,7 +49,7 @@ func (o *{{$tableNameSingular}}) doBeforeDeleteHooks(exec boil.Executor) (err er
 func (o *{{$tableNameSingular}}) doBeforeUpsertHooks(exec boil.Executor) (err error) {
 	for _, hook := range {{$varNameSingular}}BeforeUpsertHooks {
 		if err := hook(exec, o); err != nil {
-			return errors.Err(err)
+			return err
 		}
 	}
 
@@ -60,7 +60,7 @@ func (o *{{$tableNameSingular}}) doBeforeUpsertHooks(exec boil.Executor) (err er
 func (o *{{$tableNameSingular}}) doAfterInsertHooks(exec boil.Executor) (err error) {
 	for _, hook := range {{$varNameSingular}}AfterInsertHooks {
 		if err := hook(exec, o); err != nil {
-			return errors.Err(err)
+			return err
 		}
 	}
 
@@ -71,7 +71,7 @@ func (o *{{$tableNameSingular}}) doAfterInsertHooks(exec boil.Executor) (err err
 func (o *{{$tableNameSingular}}) doAfterSelectHooks(exec boil.Executor) (err error) {
 	for _, hook := range {{$varNameSingular}}AfterSelectHooks {
 		if err := hook(exec, o); err != nil {
-			return errors.Err(err)
+			return err
 		}
 	}
 
@@ -82,7 +82,7 @@ func (o *{{$tableNameSingular}}) doAfterSelectHooks(exec boil.Executor) (err err
 func (o *{{$tableNameSingular}}) doAfterUpdateHooks(exec boil.Executor) (err error) {
 	for _, hook := range {{$varNameSingular}}AfterUpdateHooks {
 		if err := hook(exec, o); err != nil {
-			return errors.Err(err)
+			return err
 		}
 	}
 
@@ -93,7 +93,7 @@ func (o *{{$tableNameSingular}}) doAfterUpdateHooks(exec boil.Executor) (err err
 func (o *{{$tableNameSingular}}) doAfterDeleteHooks(exec boil.Executor) (err error) {
 	for _, hook := range {{$varNameSingular}}AfterDeleteHooks {
 		if err := hook(exec, o); err != nil {
-			return errors.Err(err)
+			return err
 		}
 	}
 
@@ -104,7 +104,7 @@ func (o *{{$tableNameSingular}}) doAfterDeleteHooks(exec boil.Executor) (err err
 func (o *{{$tableNameSingular}}) doAfterUpsertHooks(exec boil.Executor) (err error) {
 	for _, hook := range {{$varNameSingular}}AfterUpsertHooks {
 		if err := hook(exec, o); err != nil {
-			return errors.Err(err)
+			return err
 		}
 	}
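
The hook runners now return hook errors untouched instead of re-wrapping them, so sentinel comparisons in callers keep working. A standalone sketch of the loop shape and why the passthrough matters; the pilot types are hypothetical.

package main

import (
	"errors"
	"fmt"
)

type pilot struct{ ID int }

type pilotHook func(*pilot) error

var pilotBeforeInsertHooks []pilotHook

func (o *pilot) doBeforeInsertHooks() error {
	for _, hook := range pilotBeforeInsertHooks {
		if err := hook(o); err != nil {
			return err // previously re-wrapped before returning
		}
	}
	return nil
}

func main() {
	sentinel := errors.New("refuse insert")
	pilotBeforeInsertHooks = append(pilotBeforeInsertHooks, func(*pilot) error { return sentinel })

	err := (&pilot{ID: 1}).doBeforeInsertHooks()
	fmt.Println(errors.Is(err, sentinel)) // true: the sentinel comes back unchanged
}
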
 
diff --git a/templates/03_finishers.tpl b/templates/03_finishers.tpl
index 473bddd..429a276 100644
--- a/templates/03_finishers.tpl
+++ b/templates/03_finishers.tpl
@@ -1,27 +1,27 @@
 {{- $tableNameSingular := .Table.Name | singular | titleCase -}}
 {{- $varNameSingular := .Table.Name | singular | camelCase -}}
-// OneP returns a single {{$tableNameSingular}} record from the query, and panics on error.
-func (q {{$tableNameSingular}}Query) OneP() (*{{$tableNameSingular}}) {
+// OneP returns a single {{$varNameSingular}} record from the query, and panics on error.
+func (q {{$varNameSingular}}Query) OneP() (*{{$tableNameSingular}}) {
 	o, err := q.One()
 	if err != nil {
-		panic(errors.Err(err))
+		panic(boil.WrapErr(err))
 	}
 
 	return o
 }
 
-// One returns a single {{$tableNameSingular}} record from the query.
-func (q {{$tableNameSingular}}Query) One() (*{{$tableNameSingular}}, error) {
+// One returns a single {{$varNameSingular}} record from the query.
+func (q {{$varNameSingular}}Query) One() (*{{$tableNameSingular}}, error) {
 	o := &{{$tableNameSingular}}{}
 
 	queries.SetLimit(q.Query, 1)
 
 	err := q.Bind(o)
 	if err != nil {
-		if errors.Is(err, sql.ErrNoRows) {
-			return nil, nil
+		if errors.Cause(err) == sql.ErrNoRows {
+			return nil, sql.ErrNoRows
 		}
-		return nil, errors.Prefix("{{.PkgName}}: failed to execute a one query for {{.Table.Name}}", err)
+		return nil, errors.Wrap(err, "{{.PkgName}}: failed to execute a one query for {{.Table.Name}}")
 	}
 
 	{{if not .NoHooks -}}
@@ -34,22 +34,22 @@ func (q {{$tableNameSingular}}Query) One() (*{{$tableNameSingular}}, error) {
 }
 
 // AllP returns all {{$tableNameSingular}} records from the query, and panics on error.
-func (q {{$tableNameSingular}}Query) AllP() {{$tableNameSingular}}Slice {
+func (q {{$varNameSingular}}Query) AllP() {{$tableNameSingular}}Slice {
 	o, err := q.All()
 	if err != nil {
-		panic(errors.Err(err))
+		panic(boil.WrapErr(err))
 	}
 
 	return o
 }
 
 // All returns all {{$tableNameSingular}} records from the query.
-func (q {{$tableNameSingular}}Query) All() ({{$tableNameSingular}}Slice, error) {
-	var o []*{{$tableNameSingular}}
+func (q {{$varNameSingular}}Query) All() ({{$tableNameSingular}}Slice, error) {
+	var o {{$tableNameSingular}}Slice
 
 	err := q.Bind(&o)
 	if err != nil {
-		return nil, errors.Prefix("{{.PkgName}}: failed to assign all query results to {{$tableNameSingular}} slice", err)
+		return nil, errors.Wrap(err, "{{.PkgName}}: failed to assign all query results to {{$tableNameSingular}} slice")
 	}
 
 	{{if not .NoHooks -}}
@@ -66,17 +66,17 @@ func (q {{$tableNameSingular}}Query) All() ({{$tableNameSingular}}Slice, error)
 }
 
 // CountP returns the count of all {{$tableNameSingular}} records in the query, and panics on error.
-func (q {{$tableNameSingular}}Query) CountP() int64 {
+func (q {{$varNameSingular}}Query) CountP() int64 {
 	c, err := q.Count()
 	if err != nil {
-		panic(errors.Err(err))
+		panic(boil.WrapErr(err))
 	}
 
 	return c
 }
 
 // Count returns the count of all {{$tableNameSingular}} records in the query.
-func (q {{$tableNameSingular}}Query) Count() (int64, error) {
+func (q {{$varNameSingular}}Query) Count() (int64, error) {
 	var count int64
 
 	queries.SetSelect(q.Query, nil)
@@ -84,33 +84,32 @@ func (q {{$tableNameSingular}}Query) Count() (int64, error) {
 
 	err := q.Query.QueryRow().Scan(&count)
 	if err != nil {
-		return 0, errors.Prefix("{{.PkgName}}: failed to count {{.Table.Name}} rows", err)
+		return 0, errors.Wrap(err, "{{.PkgName}}: failed to count {{.Table.Name}} rows")
 	}
 
 	return count, nil
 }
 
 // Exists checks if the row exists in the table, and panics on error.
-func (q {{$tableNameSingular}}Query) ExistsP() bool {
+func (q {{$varNameSingular}}Query) ExistsP() bool {
 	e, err := q.Exists()
 	if err != nil {
-		panic(errors.Err(err))
+		panic(boil.WrapErr(err))
 	}
 
 	return e
 }
 
 // Exists checks if the row exists in the table.
-func (q {{$tableNameSingular}}Query) Exists() (bool, error) {
+func (q {{$varNameSingular}}Query) Exists() (bool, error) {
 	var count int64
 
 	queries.SetCount(q.Query)
-	queries.SetSelect(q.Query, []string{})
 	queries.SetLimit(q.Query, 1)
 
 	err := q.Query.QueryRow().Scan(&count)
 	if err != nil {
-		return false, errors.Prefix("{{.PkgName}}: failed to check if {{.Table.Name}} exists", err)
+		return false, errors.Wrap(err, "{{.PkgName}}: failed to check if {{.Table.Name}} exists")
 	}
 
 	return count > 0, nil
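
One() now reports a miss as sql.ErrNoRows instead of returning a nil object with a nil error, so callers branch on the error again. A hedged caller-side sketch of the pattern; the finisher is faked with a closure since the generated models package is not part of this patch.

package main

import (
	"database/sql"
	"fmt"
)

type pilot struct{ ID int }

// handleOne stands in for handling a generated finisher call such as
// models.Pilots(db, qm.Where("id=?", id)).One().
func handleOne(one func() (*pilot, error)) {
	p, err := one()
	if err == sql.ErrNoRows {
		fmt.Println("not found")
		return
	}
	if err != nil {
		fmt.Println("query failed:", err)
		return
	}
	fmt.Println("found pilot", p.ID)
}

func main() {
	handleOne(func() (*pilot, error) { return nil, sql.ErrNoRows })
	handleOne(func() (*pilot, error) { return &pilot{ID: 7}, nil })
}
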
diff --git a/templates/04_relationship_to_one.tpl b/templates/04_relationship_to_one.tpl
index 9bc8c5f..05c75c9 100644
--- a/templates/04_relationship_to_one.tpl
+++ b/templates/04_relationship_to_one.tpl
@@ -3,14 +3,14 @@
 	{{- $dot := . -}}
 	{{- range .Table.FKeys -}}
 		{{- $txt := txtsFromFKey $dot.Tables $dot.Table . -}}
-		{{- $tableNameSingular := .ForeignTable | singular | titleCase}}
+		{{- $varNameSingular := .ForeignTable | singular | camelCase}}
 // {{$txt.Function.Name}}G pointed to by the foreign key.
-func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}G(mods ...qm.QueryMod) {{$tableNameSingular}}Query {
+func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}G(mods ...qm.QueryMod) {{$varNameSingular}}Query {
 	return o.{{$txt.Function.Name}}(boil.GetDB(), mods...)
 }
 
 // {{$txt.Function.Name}} pointed to by the foreign key.
-func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor, mods ...qm.QueryMod) ({{$tableNameSingular}}Query) {
+func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor, mods ...qm.QueryMod) ({{$varNameSingular}}Query) {
 	queryMods := []qm.QueryMod{
 		qm.Where("{{$txt.ForeignTable.ColumnName}}=?", o.{{$txt.LocalTable.ColumnNameGo}}),
 	}
diff --git a/templates/05_relationship_one_to_one.tpl b/templates/05_relationship_one_to_one.tpl
index 1dcd2ee..e74279c 100644
--- a/templates/05_relationship_one_to_one.tpl
+++ b/templates/05_relationship_one_to_one.tpl
@@ -3,14 +3,14 @@
 	{{- $dot := . -}}
 	{{- range .Table.ToOneRelationships -}}
 		{{- $txt := txtsFromOneToOne $dot.Tables $dot.Table . -}}
-		{{- $tableNameSingular := .ForeignTable | singular | titleCase}}
+		{{- $varNameSingular := .ForeignTable | singular | camelCase}}
 // {{$txt.Function.Name}}G pointed to by the foreign key.
-func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}G(mods ...qm.QueryMod) {{$tableNameSingular}}Query {
+func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}G(mods ...qm.QueryMod) {{$varNameSingular}}Query {
 	return o.{{$txt.Function.Name}}(boil.GetDB(), mods...)
 }
 
 // {{$txt.Function.Name}} pointed to by the foreign key.
-func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor, mods ...qm.QueryMod) ({{$tableNameSingular}}Query) {
+func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor, mods ...qm.QueryMod) ({{$varNameSingular}}Query) {
 	queryMods := []qm.QueryMod{
 		qm.Where("{{$txt.ForeignTable.ColumnName}}=?", o.{{$txt.LocalTable.ColumnNameGo}}),
 	}
diff --git a/templates/06_relationship_to_many.tpl b/templates/06_relationship_to_many.tpl
index 0e6e634..c108eeb 100644
--- a/templates/06_relationship_to_many.tpl
+++ b/templates/06_relationship_to_many.tpl
@@ -3,20 +3,20 @@
 	{{- $dot := . -}}
 	{{- $table := .Table -}}
 	{{- range .Table.ToManyRelationships -}}
-		{{- $tableNameSingular := .ForeignTable | singular | titleCase -}}
+		{{- $varNameSingular := .ForeignTable | singular | camelCase -}}
 		{{- $txt := txtsFromToMany $dot.Tables $table . -}}
 		{{- $schemaForeignTable := .ForeignTable | $dot.SchemaTable}}
 // {{$txt.Function.Name}}G retrieves all the {{.ForeignTable | singular}}'s {{$txt.ForeignTable.NameHumanReadable}}
 {{- if not (eq $txt.Function.Name $txt.ForeignTable.NamePluralGo)}} via {{.ForeignColumn}} column{{- end}}.
-func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}G(mods ...qm.QueryMod) {{$tableNameSingular}}Query {
+func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}G(mods ...qm.QueryMod) {{$varNameSingular}}Query {
 	return o.{{$txt.Function.Name}}(boil.GetDB(), mods...)
 }
 
 // {{$txt.Function.Name}} retrieves all the {{.ForeignTable | singular}}'s {{$txt.ForeignTable.NameHumanReadable}} with an executor
 {{- if not (eq $txt.Function.Name $txt.ForeignTable.NamePluralGo)}} via {{.ForeignColumn}} column{{- end}}.
-func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor, mods ...qm.QueryMod) {{$tableNameSingular}}Query {
+func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor, mods ...qm.QueryMod) {{$varNameSingular}}Query {
 	queryMods := []qm.QueryMod{
-		qm.Select("{{$schemaForeignTable}}.*"),
+		qm.Select("{{id 0 | $dot.Quotes}}.*"),
 	}
 
 	if len(mods) != 0 {
@@ -25,18 +25,17 @@ func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor,
 
 		{{if .ToJoinTable -}}
 	queryMods = append(queryMods,
-		{{$schemaJoinTable := .JoinTable | $.SchemaTable -}}
-		qm.InnerJoin("{{$schemaJoinTable}} on {{$schemaForeignTable}}.{{.ForeignColumn | $dot.Quotes}} = {{$schemaJoinTable}}.{{.JoinForeignColumn | $dot.Quotes}}"),
-		qm.Where("{{$schemaJoinTable}}.{{.JoinLocalColumn | $dot.Quotes}}=?", o.{{$txt.LocalTable.ColumnNameGo}}),
+		qm.InnerJoin("{{.JoinTable | $dot.SchemaTable}} as {{id 1 | $dot.Quotes}} on {{id 0 | $dot.Quotes}}.{{.ForeignColumn | $dot.Quotes}} = {{id 1 | $dot.Quotes}}.{{.JoinForeignColumn | $dot.Quotes}}"),
+		qm.Where("{{id 1 | $dot.Quotes}}.{{.JoinLocalColumn | $dot.Quotes}}=?", o.{{$txt.LocalTable.ColumnNameGo}}),
 	)
 		{{else -}}
 	queryMods = append(queryMods,
-		qm.Where("{{$schemaForeignTable}}.{{.ForeignColumn | $dot.Quotes}}=?", o.{{$txt.LocalTable.ColumnNameGo}}),
+		qm.Where("{{id 0 | $dot.Quotes}}.{{.ForeignColumn | $dot.Quotes}}=?", o.{{$txt.LocalTable.ColumnNameGo}}),
 	)
 		{{end}}
 
 	query := {{$txt.ForeignTable.NamePluralGo}}(exec, queryMods...)
-	queries.SetFrom(query.Query, "{{$schemaForeignTable}}")
+	queries.SetFrom(query.Query, "{{$schemaForeignTable}} as {{id 0 | $dot.Quotes}}")
 	return query
 }
 
diff --git a/templates/07_relationship_to_one_eager.tpl b/templates/07_relationship_to_one_eager.tpl
index f6bba5b..43392f0 100644
--- a/templates/07_relationship_to_one_eager.tpl
+++ b/templates/07_relationship_to_one_eager.tpl
@@ -4,7 +4,8 @@
 	{{- range .Table.FKeys -}}
 		{{- $txt := txtsFromFKey $dot.Tables $dot.Table . -}}
 		{{- $varNameSingular := $dot.Table.Name | singular | camelCase -}}
-		{{- $arg := printf "maybe%s" $txt.LocalTable.NameGo}}
+		{{- $arg := printf "maybe%s" $txt.LocalTable.NameGo -}}
+		{{- $slice := printf "%sSlice" $txt.LocalTable.NameGo}}
 // Load{{$txt.Function.Name}} allows an eager lookup of values, cached into the
 // loaded structs of the objects.
 func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singular bool, {{$arg}} interface{}) error {
@@ -15,7 +16,7 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
 	if singular {
 		object = {{$arg}}.(*{{$txt.LocalTable.NameGo}})
 	} else {
-		slice = *{{$arg}}.(*[]*{{$txt.LocalTable.NameGo}})
+		slice = *{{$arg}}.(*{{$slice}})
 		count = len(slice)
 	}
 
@@ -45,36 +46,32 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
 
 	results, err := e.Query(query, args...)
 	if err != nil {
-		return errors.Prefix("failed to eager load {{$txt.ForeignTable.NameGo}}", err)
+		return errors.Wrap(err, "failed to eager load {{$txt.ForeignTable.NameGo}}")
 	}
 	defer results.Close()
 
 	var resultSlice []*{{$txt.ForeignTable.NameGo}}
 	if err = queries.Bind(results, &resultSlice); err != nil {
-		return errors.Prefix("failed to bind eager loaded slice {{$txt.ForeignTable.NameGo}}", err)
+		return errors.Wrap(err, "failed to bind eager loaded slice {{$txt.ForeignTable.NameGo}}")
 	}
 
 	{{if not $dot.NoHooks -}}
 	if len({{$varNameSingular}}AfterSelectHooks) != 0 {
 		for _, obj := range resultSlice {
 			if err := obj.doAfterSelectHooks(e); err != nil {
-				return errors.Err(err)
+				return err
 			}
 		}
 	}
 	{{- end}}
 
-	if len(resultSlice) == 0 {
-		return nil
-	}
-
-	if singular {
+	if singular && len(resultSlice) != 0 {
 		object.R.{{$txt.Function.Name}} = resultSlice[0]
 		return nil
 	}
 
-	for _, local := range slice {
-		for _, foreign := range resultSlice {
+	for _, foreign := range resultSlice {
+		for _, local := range slice {
 			{{if $txt.Function.UsesBytes -}}
 			if 0 == bytes.Compare(local.{{$txt.Function.LocalAssignment}}, foreign.{{$txt.Function.ForeignAssignment}}) {
 			{{else -}}
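
In the eager loaders the non-singular branch now asserts the generated slice type (PilotSlice-style) rather than the raw *[]*T form. A small standalone sketch of that assertion change with a hypothetical pilotSlice:

package main

import "fmt"

type pilot struct{ ID int }

// pilotSlice stands in for a generated type like models.PilotSlice.
type pilotSlice []*pilot

// collect mirrors the template's branch shape: the interface{} argument is
// now asserted to *pilotSlice instead of *[]*pilot.
func collect(maybePilot interface{}, singular bool) []*pilot {
	if singular {
		return []*pilot{maybePilot.(*pilot)}
	}
	return *maybePilot.(*pilotSlice)
}

func main() {
	ps := pilotSlice{{ID: 1}, {ID: 2}}
	fmt.Println(len(collect(&ps, false)))          // 2
	fmt.Println(len(collect(&pilot{ID: 3}, true))) // 1
}
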
diff --git a/templates/08_relationship_one_to_one_eager.tpl b/templates/08_relationship_one_to_one_eager.tpl
index cd587fc..6603d55 100644
--- a/templates/08_relationship_one_to_one_eager.tpl
+++ b/templates/08_relationship_one_to_one_eager.tpl
@@ -5,6 +5,7 @@
 		{{- $txt := txtsFromOneToOne $dot.Tables $dot.Table . -}}
 		{{- $varNameSingular := $dot.Table.Name | singular | camelCase -}}
 		{{- $arg := printf "maybe%s" $txt.LocalTable.NameGo -}}
+		{{- $slice := printf "%sSlice" $txt.LocalTable.NameGo}}
 // Load{{$txt.Function.Name}} allows an eager lookup of values, cached into the
 // loaded structs of the objects.
 func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singular bool, {{$arg}} interface{}) error {
@@ -15,7 +16,7 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
 	if singular {
 		object = {{$arg}}.(*{{$txt.LocalTable.NameGo}})
 	} else {
-		slice = *{{$arg}}.(*[]*{{$txt.LocalTable.NameGo}})
+		slice = *{{$arg}}.(*{{$slice}})
 		count = len(slice)
 	}
 
@@ -45,36 +46,32 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
 
 	results, err := e.Query(query, args...)
 	if err != nil {
-		return errors.Prefix("failed to eager load {{$txt.ForeignTable.NameGo}}", err)
+		return errors.Wrap(err, "failed to eager load {{$txt.ForeignTable.NameGo}}")
 	}
 	defer results.Close()
 
 	var resultSlice []*{{$txt.ForeignTable.NameGo}}
 	if err = queries.Bind(results, &resultSlice); err != nil {
-		return errors.Prefix("failed to bind eager loaded slice {{$txt.ForeignTable.NameGo}}", err)
+		return errors.Wrap(err, "failed to bind eager loaded slice {{$txt.ForeignTable.NameGo}}")
 	}
 
 	{{if not $dot.NoHooks -}}
 	if len({{$varNameSingular}}AfterSelectHooks) != 0 {
 		for _, obj := range resultSlice {
 			if err := obj.doAfterSelectHooks(e); err != nil {
-				return errors.Err(err)
+				return err
 			}
 		}
 	}
 	{{- end}}
 
-	if len(resultSlice) == 0 {
-		return nil
-	}
-
-	if singular {
+	if singular && len(resultSlice) != 0 {
 		object.R.{{$txt.Function.Name}} = resultSlice[0]
 		return nil
 	}
 
-	for _, local := range slice {
-		for _, foreign := range resultSlice {
+	for _, foreign := range resultSlice {
+		for _, local := range slice {
 			{{if $txt.Function.UsesBytes -}}
 			if 0 == bytes.Compare(local.{{$txt.Function.LocalAssignment}}, foreign.{{$txt.Function.ForeignAssignment}}) {
 			{{else -}}
diff --git a/templates/09_relationship_to_many_eager.tpl b/templates/09_relationship_to_many_eager.tpl
index f3fd2c4..f1a7f5d 100644
--- a/templates/09_relationship_to_many_eager.tpl
+++ b/templates/09_relationship_to_many_eager.tpl
@@ -5,6 +5,7 @@
 		{{- $varNameSingular := $dot.Table.Name | singular | camelCase -}}
 		{{- $txt := txtsFromToMany $dot.Tables $dot.Table . -}}
 		{{- $arg := printf "maybe%s" $txt.LocalTable.NameGo -}}
+		{{- $slice := printf "%sSlice" $txt.LocalTable.NameGo -}}
 		{{- $schemaForeignTable := .ForeignTable | $dot.SchemaTable}}
 // Load{{$txt.Function.Name}} allows an eager lookup of values, cached into the
 // loaded structs of the objects.
@@ -16,7 +17,7 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
 	if singular {
 		object = {{$arg}}.(*{{$txt.LocalTable.NameGo}})
 	} else {
-		slice = *{{$arg}}.(*[]*{{$txt.LocalTable.NameGo}})
+		slice = *{{$arg}}.(*{{$slice}})
 		count = len(slice)
 	}
 
@@ -54,7 +55,7 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
 
 	results, err := e.Query(query, args...)
 	if err != nil {
-		return errors.Prefix("failed to eager load {{.ForeignTable}}", err)
+		return errors.Wrap(err, "failed to eager load {{.ForeignTable}}")
 	}
 	defer results.Close()
 
@@ -70,7 +71,7 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
 
 		err = results.Scan({{$foreignTable.Columns | columnNames | stringMap $dot.StringFuncs.titleCase | prefixStringSlice "&one." | join ", "}}, &localJoinCol)
 		if err = results.Err(); err != nil {
-			return errors.Prefix("failed to plebian-bind eager loaded slice {{.ForeignTable}}", err)
+			return errors.Wrap(err, "failed to plebian-bind eager loaded slice {{.ForeignTable}}")
 		}
 
 		resultSlice = append(resultSlice, one)
@@ -78,11 +79,11 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
 	}
 
 	if err = results.Err(); err != nil {
-		return errors.Prefix("failed to plebian-bind eager loaded slice {{.ForeignTable}}", err)
+		return errors.Wrap(err, "failed to plebian-bind eager loaded slice {{.ForeignTable}}")
 	}
 	{{else -}}
 	if err = queries.Bind(results, &resultSlice); err != nil {
-		return errors.Prefix("failed to bind eager loaded slice {{.ForeignTable}}", err)
+		return errors.Wrap(err, "failed to bind eager loaded slice {{.ForeignTable}}")
 	}
 	{{end}}
 
@@ -90,7 +91,7 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
 	if len({{.ForeignTable | singular | camelCase}}AfterSelectHooks) != 0 {
 		for _, obj := range resultSlice {
 			if err := obj.doAfterSelectHooks(e); err != nil {
-				return errors.Err(err)
+				return err
 			}
 		}
 	}
diff --git a/templates/10_relationship_to_one_setops.tpl b/templates/10_relationship_to_one_setops.tpl
index 200adc3..728ab10 100644
--- a/templates/10_relationship_to_one_setops.tpl
+++ b/templates/10_relationship_to_one_setops.tpl
@@ -6,34 +6,6 @@
 		{{- $foreignNameSingular := .ForeignTable | singular | camelCase -}}
 		{{- $varNameSingular := .Table | singular | camelCase}}
 		{{- $schemaTable := .Table | $dot.SchemaTable}}
-// Set{{$txt.Function.Name}}G of the {{.Table | singular}} to the related item.
-// Sets o.R.{{$txt.Function.Name}} to related.
-// Adds o to related.R.{{$txt.Function.ForeignName}}.
-// Uses the global database handle.
-func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}G(insert bool, related *{{$txt.ForeignTable.NameGo}}) error {
-	return o.Set{{$txt.Function.Name}}(boil.GetDB(), insert, related)
-}
-
-// Set{{$txt.Function.Name}}P of the {{.Table | singular}} to the related item.
-// Sets o.R.{{$txt.Function.Name}} to related.
-// Adds o to related.R.{{$txt.Function.ForeignName}}.
-// Panics on error.
-func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}P(exec boil.Executor, insert bool, related *{{$txt.ForeignTable.NameGo}}) {
-	if err := o.Set{{$txt.Function.Name}}(exec, insert, related); err != nil {
-		panic(errors.Err(err))
-	}
-}
-
-// Set{{$txt.Function.Name}}GP of the {{.Table | singular}} to the related item.
-// Sets o.R.{{$txt.Function.Name}} to related.
-// Adds o to related.R.{{$txt.Function.ForeignName}}.
-// Uses the global database handle and panics on error.
-func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}GP(insert bool, related *{{$txt.ForeignTable.NameGo}}) {
-	if err := o.Set{{$txt.Function.Name}}(boil.GetDB(), insert, related); err != nil {
-		panic(errors.Err(err))
-	}
-}
-
 // Set{{$txt.Function.Name}} of the {{.Table | singular}} to the related item.
 // Sets o.R.{{$txt.Function.Name}} to related.
 // Adds o to related.R.{{$txt.Function.ForeignName}}.
@@ -41,7 +13,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}(exec boil.Executo
 	var err error
 	if insert {
 		if err = related.Insert(exec); err != nil {
-			return errors.Prefix("failed to insert into foreign table", err)
+			return errors.Wrap(err, "failed to insert into foreign table")
 		}
 	}
 
@@ -58,7 +30,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}(exec boil.Executo
 	}
 
 	if _, err = exec.Exec(updateQuery, values...); err != nil {
-		return errors.Prefix("failed to update local table", err)
+		return errors.Wrap(err, "failed to update local table")
 	}
 
 	o.{{$txt.Function.LocalAssignment}} = related.{{$txt.Function.ForeignAssignment}}
@@ -96,34 +68,6 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}(exec boil.Executo
 }
 
 		{{- if .Nullable}}
-// Remove{{$txt.Function.Name}}G relationship.
-// Sets o.R.{{$txt.Function.Name}} to nil.
-// Removes o from all passed in related items' relationships struct (Optional).
-// Uses the global database handle.
-func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}G(related *{{$txt.ForeignTable.NameGo}}) error {
-	return o.Remove{{$txt.Function.Name}}(boil.GetDB(), related)
-}
-
-// Remove{{$txt.Function.Name}}P relationship.
-// Sets o.R.{{$txt.Function.Name}} to nil.
-// Removes o from all passed in related items' relationships struct (Optional).
-// Panics on error.
-func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}P(exec boil.Executor, related *{{$txt.ForeignTable.NameGo}}) {
-	if err := o.Remove{{$txt.Function.Name}}(exec, related); err != nil {
-		panic(errors.Err(err))
-	}
-}
-
-// Remove{{$txt.Function.Name}}GP relationship.
-// Sets o.R.{{$txt.Function.Name}} to nil.
-// Removes o from all passed in related items' relationships struct (Optional).
-// Uses the global database handle and panics on error.
-func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}GP(related *{{$txt.ForeignTable.NameGo}}) {
-	if err := o.Remove{{$txt.Function.Name}}(boil.GetDB(), related); err != nil {
-		panic(errors.Err(err))
-	}
-}
-
 // Remove{{$txt.Function.Name}} relationship.
 // Sets o.R.{{$txt.Function.Name}} to nil.
 // Removes o from all passed in related items' relationships struct (Optional).
@@ -133,7 +77,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}(exec boil.Exec
 	o.{{$txt.LocalTable.ColumnNameGo}}.Valid = false
 	if err = o.Update(exec, "{{.Column}}"); err != nil {
 		o.{{$txt.LocalTable.ColumnNameGo}}.Valid = true
-		return errors.Prefix("failed to update local table", err)
+		return errors.Wrap(err, "failed to update local table")
 	}
 
 	o.R.{{$txt.Function.Name}} = nil
diff --git a/templates/11_relationship_one_to_one_setops.tpl b/templates/11_relationship_one_to_one_setops.tpl
index 29ce0a1..7466d7a 100644
--- a/templates/11_relationship_one_to_one_setops.tpl
+++ b/templates/11_relationship_one_to_one_setops.tpl
@@ -7,34 +7,6 @@
 		{{- $foreignVarNameSingular := .ForeignTable | singular | camelCase -}}
 		{{- $foreignPKeyCols := (getTable $dot.Tables .ForeignTable).PKey.Columns -}}
 		{{- $foreignSchemaTable := .ForeignTable | $dot.SchemaTable}}
-// Set{{$txt.Function.Name}}G of the {{.Table | singular}} to the related item.
-// Sets o.R.{{$txt.Function.Name}} to related.
-// Adds o to related.R.{{$txt.Function.ForeignName}}.
-// Uses the global database handle.
-func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}G(insert bool, related *{{$txt.ForeignTable.NameGo}}) error {
-	return o.Set{{$txt.Function.Name}}(boil.GetDB(), insert, related)
-}
-
-// Set{{$txt.Function.Name}}P of the {{.Table | singular}} to the related item.
-// Sets o.R.{{$txt.Function.Name}} to related.
-// Adds o to related.R.{{$txt.Function.ForeignName}}.
-// Panics on error.
-func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}P(exec boil.Executor, insert bool, related *{{$txt.ForeignTable.NameGo}}) {
-	if err := o.Set{{$txt.Function.Name}}(exec, insert, related); err != nil {
-		panic(errors.Err(err))
-	}
-}
-
-// Set{{$txt.Function.Name}}GP of the {{.Table | singular}} to the related item.
-// Sets o.R.{{$txt.Function.Name}} to related.
-// Adds o to related.R.{{$txt.Function.ForeignName}}.
-// Uses the global database handle and panics on error.
-func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}GP(insert bool, related *{{$txt.ForeignTable.NameGo}}) {
-	if err := o.Set{{$txt.Function.Name}}(boil.GetDB(), insert, related); err != nil {
-		panic(errors.Err(err))
-	}
-}
-
 // Set{{$txt.Function.Name}} of the {{.Table | singular}} to the related item.
 // Sets o.R.{{$txt.Function.Name}} to related.
 // Adds o to related.R.{{$txt.Function.ForeignName}}.
@@ -48,7 +20,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}(exec boil.Executo
 		{{- end}}
 
 		if err = related.Insert(exec); err != nil {
-			return errors.Prefix("failed to insert into foreign table", err)
+			return errors.Wrap(err, "failed to insert into foreign table")
 		}
 	} else {
 		updateQuery := fmt.Sprintf(
@@ -64,7 +36,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}(exec boil.Executo
 		}
 
 		if _, err = exec.Exec(updateQuery, values...); err != nil {
-			return errors.Prefix("failed to update foreign table", err)
+			return errors.Wrap(err, "failed to update foreign table")
 		}
 
 		related.{{$txt.Function.ForeignAssignment}} = o.{{$txt.Function.LocalAssignment}}
@@ -93,34 +65,6 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}(exec boil.Executo
 }
 
 		{{- if .ForeignColumnNullable}}
-// Remove{{$txt.Function.Name}}G relationship.
-// Sets o.R.{{$txt.Function.Name}} to nil.
-// Removes o from all passed in related items' relationships struct (Optional).
-// Uses the global database handle.
-func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}G(related *{{$txt.ForeignTable.NameGo}}) error {
-	return o.Remove{{$txt.Function.Name}}(boil.GetDB(), related)
-}
-
-// Remove{{$txt.Function.Name}}P relationship.
-// Sets o.R.{{$txt.Function.Name}} to nil.
-// Removes o from all passed in related items' relationships struct (Optional).
-// Panics on error.
-func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}P(exec boil.Executor, related *{{$txt.ForeignTable.NameGo}}) {
-	if err := o.Remove{{$txt.Function.Name}}(exec, related); err != nil {
-		panic(errors.Err(err))
-	}
-}
-
-// Remove{{$txt.Function.Name}}GP relationship.
-// Sets o.R.{{$txt.Function.Name}} to nil.
-// Removes o from all passed in related items' relationships struct (Optional).
-// Uses the global database handle and panics on error.
-func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}GP(related *{{$txt.ForeignTable.NameGo}}) {
-	if err := o.Remove{{$txt.Function.Name}}(boil.GetDB(), related); err != nil {
-		panic(errors.Err(err))
-	}
-}
-
 // Remove{{$txt.Function.Name}} relationship.
 // Sets o.R.{{$txt.Function.Name}} to nil.
 // Removes o from all passed in related items' relationships struct (Optional).
@@ -130,7 +74,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}(exec boil.Exec
 	related.{{$txt.ForeignTable.ColumnNameGo}}.Valid = false
 	if err = related.Update(exec, "{{.ForeignColumn}}"); err != nil {
 		related.{{$txt.ForeignTable.ColumnNameGo}}.Valid = true
-		return errors.Prefix("failed to update local table", err)
+		return errors.Wrap(err, "failed to update local table")
 	}
 
 	o.R.{{$txt.Function.Name}} = nil
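
The one-to-one Set above still assembles its UPDATE by hand from strmangle helpers. A rough sketch of what that assembly yields for a hypothetical videos/thumbnails pair, assuming the usual SetParamNames(lq, rq, start, cols) and WhereClause(lq, rq, start, cols) signatures and this fork's strmangle import path (all names here are illustrative):

package main

import (
	"fmt"

	"github.com/vattle/sqlboiler/strmangle" // import path assumed
)

func main() {
	// SET parameters come first, then the primary-key WHERE clause, with
	// placeholder numbering continuing where the SET clause left off.
	updateQuery := fmt.Sprintf(
		`UPDATE "thumbnails" SET %s WHERE %s`,
		strmangle.SetParamNames(`"`, `"`, 1, []string{"video_id"}),
		strmangle.WhereClause(`"`, `"`, 2, []string{"id"}),
	)
	fmt.Println(updateQuery)
	// Expected shape: UPDATE "thumbnails" SET "video_id"=$1 WHERE "id"=$2
}
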
diff --git a/templates/12_relationship_to_many_setops.tpl b/templates/12_relationship_to_many_setops.tpl
index e54c2fd..cfa0819 100644
--- a/templates/12_relationship_to_many_setops.tpl
+++ b/templates/12_relationship_to_many_setops.tpl
@@ -6,39 +6,6 @@
 		{{- $txt := txtsFromToMany $dot.Tables $table . -}}
 		{{- $varNameSingular := .Table | singular | camelCase -}}
 		{{- $foreignVarNameSingular := .ForeignTable | singular | camelCase}}
-		{{- $foreignPKeyCols := (getTable $dot.Tables .ForeignTable).PKey.Columns -}}
-		{{- $foreignSchemaTable := .ForeignTable | $dot.SchemaTable}}
-// Add{{$txt.Function.Name}}G adds the given related objects to the existing relationships
-// of the {{$table.Name | singular}}, optionally inserting them as new records.
-// Appends related to o.R.{{$txt.Function.Name}}.
-// Sets related.R.{{$txt.Function.ForeignName}} appropriately.
-// Uses the global database handle.
-func (o *{{$txt.LocalTable.NameGo}}) Add{{$txt.Function.Name}}G(insert bool, related ...*{{$txt.ForeignTable.NameGo}}) error {
-	return o.Add{{$txt.Function.Name}}(boil.GetDB(), insert, related...)
-}
-
-// Add{{$txt.Function.Name}}P adds the given related objects to the existing relationships
-// of the {{$table.Name | singular}}, optionally inserting them as new records.
-// Appends related to o.R.{{$txt.Function.Name}}.
-// Sets related.R.{{$txt.Function.ForeignName}} appropriately.
-// Panics on error.
-func (o *{{$txt.LocalTable.NameGo}}) Add{{$txt.Function.Name}}P(exec boil.Executor, insert bool, related ...*{{$txt.ForeignTable.NameGo}}) {
-	if err := o.Add{{$txt.Function.Name}}(exec, insert, related...); err != nil {
-		panic(errors.Err(err))
-	}
-}
-
-// Add{{$txt.Function.Name}}GP adds the given related objects to the existing relationships
-// of the {{$table.Name | singular}}, optionally inserting them as new records.
-// Appends related to o.R.{{$txt.Function.Name}}.
-// Sets related.R.{{$txt.Function.ForeignName}} appropriately.
-// Uses the global database handle and panics on error.
-func (o *{{$txt.LocalTable.NameGo}}) Add{{$txt.Function.Name}}GP(insert bool, related ...*{{$txt.ForeignTable.NameGo}}) {
-	if err := o.Add{{$txt.Function.Name}}(boil.GetDB(), insert, related...); err != nil {
-		panic(errors.Err(err))
-	}
-}
-
 // Add{{$txt.Function.Name}} adds the given related objects to the existing relationships
 // of the {{$table.Name | singular}}, optionally inserting them as new records.
 // Appends related to o.R.{{$txt.Function.Name}}.
@@ -46,38 +13,20 @@ func (o *{{$txt.LocalTable.NameGo}}) Add{{$txt.Function.Name}}GP(insert bool, re
 func (o *{{$txt.LocalTable.NameGo}}) Add{{$txt.Function.Name}}(exec boil.Executor, insert bool, related ...*{{$txt.ForeignTable.NameGo}}) error {
 	var err error
 	for _, rel := range related {
-		if insert {
-			{{if not .ToJoinTable -}}
-			rel.{{$txt.Function.ForeignAssignment}} = o.{{$txt.Function.LocalAssignment}}
-				{{if .ForeignColumnNullable -}}
-			rel.{{$txt.ForeignTable.ColumnNameGo}}.Valid = true
-				{{end -}}
+		{{if not .ToJoinTable -}}
+		rel.{{$txt.Function.ForeignAssignment}} = o.{{$txt.Function.LocalAssignment}}
+			{{if .ForeignColumnNullable -}}
+		rel.{{$txt.ForeignTable.ColumnNameGo}}.Valid = true
 			{{end -}}
-
+		{{end -}}
+		if insert {
 			if err = rel.Insert(exec); err != nil {
-				return errors.Prefix("failed to insert into foreign table", err)
+				return errors.Wrap(err, "failed to insert into foreign table")
 			}
 		}{{if not .ToJoinTable}} else {
-			updateQuery := fmt.Sprintf(
-				"UPDATE {{$foreignSchemaTable}} SET %s WHERE %s",
-				strmangle.SetParamNames("{{$dot.LQ}}", "{{$dot.RQ}}", {{if $dot.Dialect.IndexPlaceholders}}1{{else}}0{{end}}, []string{{"{"}}"{{.ForeignColumn}}"{{"}"}}),
-				strmangle.WhereClause("{{$dot.LQ}}", "{{$dot.RQ}}", {{if $dot.Dialect.IndexPlaceholders}}2{{else}}0{{end}}, {{$foreignVarNameSingular}}PrimaryKeyColumns),
-			)
-			values := []interface{}{o.{{$txt.LocalTable.ColumnNameGo}}, rel.{{$foreignPKeyCols | stringMap $dot.StringFuncs.titleCase | join ", rel."}}{{"}"}}
-
-			if boil.DebugMode {
-				fmt.Fprintln(boil.DebugWriter, updateQuery)
-				fmt.Fprintln(boil.DebugWriter, values)
+			if err = rel.Update(exec, "{{.ForeignColumn}}"); err != nil {
+				return errors.Wrap(err, "failed to update foreign table")
 			}
-
-			if _, err = exec.Exec(updateQuery, values...); err != nil {
-				return errors.Prefix("failed to update foreign table", err)
-			}
-
-			rel.{{$txt.Function.ForeignAssignment}} = o.{{$txt.Function.LocalAssignment}}
-			{{if .ForeignColumnNullable -}}
-			rel.{{$txt.ForeignTable.ColumnNameGo}}.Valid = true
-			{{end -}}
 		}{{end -}}
 	}
 
@@ -93,7 +42,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Add{{$txt.Function.Name}}(exec boil.Executo
 
 		_, err = exec.Exec(query, values...)
 		if err != nil {
-			return errors.Prefix("failed to insert into join table", err)
+			return errors.Wrap(err, "failed to insert into join table")
 		}
 	}
 	{{end -}}
@@ -132,43 +81,6 @@ func (o *{{$txt.LocalTable.NameGo}}) Add{{$txt.Function.Name}}(exec boil.Executo
 }
 
 			{{- if (or .ForeignColumnNullable .ToJoinTable)}}
-// Set{{$txt.Function.Name}}G removes all previously related items of the
-// {{$table.Name | singular}} replacing them completely with the passed
-// in related items, optionally inserting them as new records.
-// Sets o.R.{{$txt.Function.ForeignName}}'s {{$txt.Function.Name}} accordingly.
-// Replaces o.R.{{$txt.Function.Name}} with related.
-// Sets related.R.{{$txt.Function.ForeignName}}'s {{$txt.Function.Name}} accordingly.
-// Uses the global database handle.
-func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}G(insert bool, related ...*{{$txt.ForeignTable.NameGo}}) error {
-	return o.Set{{$txt.Function.Name}}(boil.GetDB(), insert, related...)
-}
-
-// Set{{$txt.Function.Name}}P removes all previously related items of the
-// {{$table.Name | singular}} replacing them completely with the passed
-// in related items, optionally inserting them as new records.
-// Sets o.R.{{$txt.Function.ForeignName}}'s {{$txt.Function.Name}} accordingly.
-// Replaces o.R.{{$txt.Function.Name}} with related.
-// Sets related.R.{{$txt.Function.ForeignName}}'s {{$txt.Function.Name}} accordingly.
-// Panics on error.
-func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}P(exec boil.Executor, insert bool, related ...*{{$txt.ForeignTable.NameGo}}) {
-	if err := o.Set{{$txt.Function.Name}}(exec, insert, related...); err != nil {
-		panic(errors.Err(err))
-	}
-}
-
-// Set{{$txt.Function.Name}}GP removes all previously related items of the
-// {{$table.Name | singular}} replacing them completely with the passed
-// in related items, optionally inserting them as new records.
-// Sets o.R.{{$txt.Function.ForeignName}}'s {{$txt.Function.Name}} accordingly.
-// Replaces o.R.{{$txt.Function.Name}} with related.
-// Sets related.R.{{$txt.Function.ForeignName}}'s {{$txt.Function.Name}} accordingly.
-// Uses the global database handle and panics on error.
-func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}GP(insert bool, related ...*{{$txt.ForeignTable.NameGo}}) {
-	if err := o.Set{{$txt.Function.Name}}(boil.GetDB(), insert, related...); err != nil {
-		panic(errors.Err(err))
-	}
-}
-
 // Set{{$txt.Function.Name}} removes all previously related items of the
 // {{$table.Name | singular}} replacing them completely with the passed
 // in related items, optionally inserting them as new records.
@@ -190,14 +102,12 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}(exec boil.Executo
 
 	_, err := exec.Exec(query, values...)
 	if err != nil {
-		return errors.Prefix("failed to remove relationships before set", err)
+		return errors.Wrap(err, "failed to remove relationships before set")
 	}
 
 	{{if .ToJoinTable -}}
 	remove{{$txt.Function.Name}}From{{$txt.Function.ForeignName}}Slice(o, related)
-	if o.R != nil {
-		o.R.{{$txt.Function.Name}} = nil
-	}
+	o.R.{{$txt.Function.Name}} = nil
 	{{else -}}
 	if o.R != nil {
 		for _, rel := range o.R.{{$txt.Function.Name}} {
@@ -216,34 +126,6 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}(exec boil.Executo
 	return o.Add{{$txt.Function.Name}}(exec, insert, related...)
 }
 
-// Remove{{$txt.Function.Name}}G relationships from objects passed in.
-// Removes related items from R.{{$txt.Function.Name}} (uses pointer comparison, removal does not keep order)
-// Sets related.R.{{$txt.Function.ForeignName}}.
-// Uses the global database handle.
-func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}G(related ...*{{$txt.ForeignTable.NameGo}}) error {
-	return o.Remove{{$txt.Function.Name}}(boil.GetDB(), related...)
-}
-
-// Remove{{$txt.Function.Name}}P relationships from objects passed in.
-// Removes related items from R.{{$txt.Function.Name}} (uses pointer comparison, removal does not keep order)
-// Sets related.R.{{$txt.Function.ForeignName}}.
-// Panics on error.
-func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}P(exec boil.Executor, related ...*{{$txt.ForeignTable.NameGo}}) {
-	if err := o.Remove{{$txt.Function.Name}}(exec, related...); err != nil {
-		panic(errors.Err(err))
-	}
-}
-
-// Remove{{$txt.Function.Name}}GP relationships from objects passed in.
-// Removes related items from R.{{$txt.Function.Name}} (uses pointer comparison, removal does not keep order)
-// Sets related.R.{{$txt.Function.ForeignName}}.
-// Uses the global database handle and panics on error.
-func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}GP(related ...*{{$txt.ForeignTable.NameGo}}) {
-	if err := o.Remove{{$txt.Function.Name}}(boil.GetDB(), related...); err != nil {
-		panic(errors.Err(err))
-	}
-}
-
 // Remove{{$txt.Function.Name}} relationships from objects passed in.
 // Removes related items from R.{{$txt.Function.Name}} (uses pointer comparison, removal does not keep order)
 // Sets related.R.{{$txt.Function.ForeignName}}.
@@ -252,12 +134,9 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}(exec boil.Exec
 	{{if .ToJoinTable -}}
 	query := fmt.Sprintf(
 		"delete from {{.JoinTable | $dot.SchemaTable}} where {{.JoinLocalColumn | $dot.Quotes}} = {{if $dot.Dialect.IndexPlaceholders}}$1{{else}}?{{end}} and {{.JoinForeignColumn | $dot.Quotes}} in (%s)",
-		strmangle.Placeholders(dialect.IndexPlaceholders, len(related), 2, 1),
+		strmangle.Placeholders(dialect.IndexPlaceholders, len(related), 1, 1),
 	)
 	values := []interface{}{{"{"}}o.{{$txt.LocalTable.ColumnNameGo}}}
-	for _, rel := range related {
-		values = append(values, rel.{{$txt.ForeignTable.ColumnNameGo}})
-	}
 
 	if boil.DebugMode {
 		fmt.Fprintln(boil.DebugWriter, query)
@@ -266,7 +145,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}(exec boil.Exec
 
 	_, err = exec.Exec(query, values...)
 	if err != nil {
-		return errors.Prefix("failed to remove relationships before set", err)
+		return errors.Wrap(err, "failed to remove relationships before set")
 	}
 	{{else -}}
 	for _, rel := range related {
@@ -277,7 +156,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}(exec boil.Exec
 		}
 		{{end -}}
 		if err = rel.Update(exec, "{{.ForeignColumn}}"); err != nil {
-			return errors.Err(err)
+			return err
 		}
 	}
 	{{end -}}
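
After this change the non-join-table Add path no longer hand-writes an UPDATE: it assigns the foreign key on the related object and delegates to the generated Update with a one-column whitelist. Roughly, for a hypothetical User/Post pair, the generated method behaves like the sketch below (the types and column names are stand-ins, not output of the generator):

package main

import (
	"fmt"

	"github.com/pkg/errors"
)

// Executor stands in for boil.Executor; User and Post are illustrative models.
type Executor interface{}

type Post struct {
	ID     int64
	UserID int64
}

func (p *Post) Insert(exec Executor) error {
	fmt.Println("insert post", p.ID)
	return nil
}

func (p *Post) Update(exec Executor, whitelist ...string) error {
	fmt.Println("update post", p.ID, whitelist)
	return nil
}

type User struct{ ID int64 }

// AddPosts sketches the shape the generated to-many setop now takes: the
// foreign key is assigned up front, then either Insert or a narrow
// Update("user_id") persists it, instead of a manually built UPDATE statement.
func (o *User) AddPosts(exec Executor, insert bool, related ...*Post) error {
	for _, rel := range related {
		rel.UserID = o.ID
		if insert {
			if err := rel.Insert(exec); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		} else if err := rel.Update(exec, "user_id"); err != nil {
			return errors.Wrap(err, "failed to update foreign table")
		}
	}
	// Relationship struct bookkeeping (o.R / rel.R) follows in the real template.
	return nil
}

func main() {
	u := &User{ID: 1}
	_ = u.AddPosts(nil, false, &Post{ID: 10}, &Post{ID: 11})
}
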
diff --git a/templates/13_all.tpl b/templates/13_all.tpl
index c41f19c..42cf66a 100644
--- a/templates/13_all.tpl
+++ b/templates/13_all.tpl
@@ -1,12 +1,12 @@
 {{- $tableNamePlural := .Table.Name | plural | titleCase -}}
-{{- $tableNameSingular := .Table.Name | singular | titleCase}}
+{{- $varNameSingular := .Table.Name | singular | camelCase}}
 // {{$tableNamePlural}}G retrieves all records.
-func {{$tableNamePlural}}G(mods ...qm.QueryMod) {{$tableNameSingular}}Query {
+func {{$tableNamePlural}}G(mods ...qm.QueryMod) {{$varNameSingular}}Query {
 	return {{$tableNamePlural}}(boil.GetDB(), mods...)
 }
 
 // {{$tableNamePlural}} retrieves all the records using an executor.
-func {{$tableNamePlural}}(exec boil.Executor, mods ...qm.QueryMod) {{$tableNameSingular}}Query {
+func {{$tableNamePlural}}(exec boil.Executor, mods ...qm.QueryMod) {{$varNameSingular}}Query {
 	mods = append(mods, qm.From("{{.Table.Name | .SchemaTable}}"))
-	return {{$tableNameSingular}}Query{NewQuery(exec, mods...)}
+	return {{$varNameSingular}}Query{NewQuery(exec, mods...)}
 }
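
The renamed query type is what these query-mod helpers hand back to callers. A hedged usage sketch, assuming the generated code lives in a package named models and that a users table with these columns exists (every identifier below is illustrative):

package main

import (
	"database/sql"
	"fmt"
	"log"

	_ "github.com/lib/pq"
	"github.com/vattle/sqlboiler/queries/qm" // import path assumed

	"example.com/app/models" // hypothetical package holding the generated code
)

func main() {
	db, err := sql.Open("postgres", "dbname=sqlboiler sslmode=disable")
	if err != nil {
		log.Fatal(err)
	}

	// Query mods compose into the query struct returned by Users, which is
	// then finished with All/One/Count as usual.
	users, err := models.Users(db, qm.Where("status = ?", "active"), qm.Limit(10)).All()
	if err != nil {
		log.Fatal(err)
	}
	for _, u := range users {
		fmt.Println(u.ID)
	}
}
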
diff --git a/templates/14_find.tpl b/templates/14_find.tpl
index f9a88c8..4ce7b15 100644
--- a/templates/14_find.tpl
+++ b/templates/14_find.tpl
@@ -1,7 +1,7 @@
 {{- $tableNameSingular := .Table.Name | singular | titleCase -}}
 {{- $varNameSingular := .Table.Name | singular | camelCase -}}
 {{- $colDefs := sqlColDefinitions .Table.Columns .Table.PKey.Columns -}}
-{{- $pkNames := $colDefs.Names | stringMap .StringFuncs.camelCase | stringMap .StringFuncs.replaceReserved -}}
+{{- $pkNames := $colDefs.Names | stringMap .StringFuncs.camelCase -}}
 {{- $pkArgs := joinSlices " " $pkNames $colDefs.Types | join ", "}}
 // Find{{$tableNameSingular}}G retrieves a single record by ID.
 func Find{{$tableNameSingular}}G({{$pkArgs}}, selectCols ...string) (*{{$tableNameSingular}}, error) {
@@ -12,7 +12,7 @@ func Find{{$tableNameSingular}}G({{$pkArgs}}, selectCols ...string) (*{{$tableNa
 func Find{{$tableNameSingular}}GP({{$pkArgs}}, selectCols ...string) *{{$tableNameSingular}} {
 	retobj, err := Find{{$tableNameSingular}}(boil.GetDB(), {{$pkNames | join ", "}}, selectCols...)
 	if err != nil {
-		panic(errors.Err(err))
+		panic(boil.WrapErr(err))
 	}
 
 	return retobj
@@ -35,10 +35,10 @@ func Find{{$tableNameSingular}}(exec boil.Executor, {{$pkArgs}}, selectCols ...s
 
 	err := q.Bind({{$varNameSingular}}Obj)
 	if err != nil {
-		if errors.Is(err, sql.ErrNoRows) {
-			return nil, nil
+		if errors.Cause(err) == sql.ErrNoRows {
+			return nil, sql.ErrNoRows
 		}
-		return nil, errors.Prefix("{{.PkgName}}: unable to select from {{.Table.Name}}", err)
+		return nil, errors.Wrap(err, "{{.PkgName}}: unable to select from {{.Table.Name}}")
 	}
 
 	return {{$varNameSingular}}Obj, nil
@@ -48,79 +48,8 @@ func Find{{$tableNameSingular}}(exec boil.Executor, {{$pkArgs}}, selectCols ...s
 func Find{{$tableNameSingular}}P(exec boil.Executor, {{$pkArgs}}, selectCols ...string) *{{$tableNameSingular}} {
 	retobj, err := Find{{$tableNameSingular}}(exec, {{$pkNames | join ", "}}, selectCols...)
 	if err != nil {
-		panic(errors.Err(err))
+		panic(boil.WrapErr(err))
 	}
 
 	return retobj
 }
-
-// FindOne{{$tableNameSingular}} retrieves a single record using filters.
-func FindOne{{$tableNameSingular}}(exec boil.Executor, filters {{$tableNameSingular}}Filter) (*{{$tableNameSingular}}, error) {
-	obj := &{{$tableNameSingular}}{}
-
-	err := {{$tableNameSingular}}NewQuery(exec).
-    Where(filters).
-    Limit(1).
-    Bind(obj)
-
-	if err != nil {
-		if errors.Is(err, sql.ErrNoRows) {
-			return nil, nil
-		}
-		return nil, errors.Prefix("{{.PkgName}}: unable to select from {{.Table.Name}}", err)
-	}
-
-	return obj, nil
-}
-
-// FindOne{{$tableNameSingular}}G retrieves a single record using filters.
-func FindOne{{$tableNameSingular}}G(filters {{$tableNameSingular}}Filter) (*{{$tableNameSingular}}, error) {
-	return FindOne{{$tableNameSingular}}(boil.GetDB(), filters)
-}
-
-// FindOne{{$tableNameSingular}}OrInit retrieves a single record using filters, or initializes a new record if one is not found.
-func FindOne{{$tableNameSingular}}OrInit(exec boil.Executor, filters {{$tableNameSingular}}Filter) (*{{$tableNameSingular}}, error) {
-	{{$varNameSingular}}Obj, err := FindOne{{$tableNameSingular}}(exec, filters)
-	if err != nil {
-		return nil, err
-	}
-
-	if {{$varNameSingular}}Obj == nil {
-		{{$varNameSingular}}Obj = &{{$tableNameSingular}}{}
-		objR := reflect.ValueOf({{$varNameSingular}}Obj).Elem()
-		r := reflect.ValueOf(filters)
-		for i := 0; i < r.NumField(); i++ {
-			f := r.Field(i)
-			if f.Elem().IsValid() {
-				objR.FieldByName(r.Type().Field(i).Name).Set(f.Elem())
-			}
-		}
-	}
-
-	return {{$varNameSingular}}Obj, nil
-}
-
-// FindOne{{$tableNameSingular}}OrInit retrieves a single record using filters, or initializes a new record if one is not found.
-func FindOne{{$tableNameSingular}}OrInitG(filters {{$tableNameSingular}}Filter) (*{{$tableNameSingular}}, error) {
-	return FindOne{{$tableNameSingular}}OrInit(boil.GetDB(), filters)
-}
-
-// FindOne{{$tableNameSingular}}OrInit retrieves a single record using filters, or initializes and inserts a new record if one is not found.
-func FindOne{{$tableNameSingular}}OrCreate(exec boil.Executor, filters {{$tableNameSingular}}Filter) (*{{$tableNameSingular}}, error) {
-	{{$varNameSingular}}Obj, err := FindOne{{$tableNameSingular}}OrInit(exec, filters)
-	if err != nil {
-		return nil, err
-	}
-	if {{$varNameSingular}}Obj.IsNew() {
-		err := {{$varNameSingular}}Obj.Insert(exec)
-		if err != nil {
-			return nil, err
-		}
-	}
-	return {{$varNameSingular}}Obj, nil
-}
-
-// FindOne{{$tableNameSingular}}OrInit retrieves a single record using filters, or initializes and inserts a new record if one is not found.
-func FindOne{{$tableNameSingular}}OrCreateG(filters {{$tableNameSingular}}Filter) (*{{$tableNameSingular}}, error) {
-	return FindOne{{$tableNameSingular}}OrCreate(boil.GetDB(), filters)
-}
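
With the FindOne/OrInit/OrCreate helpers removed and Find returning sql.ErrNoRows instead of (nil, nil), callers are expected to branch on the sentinel themselves. A sketch of the calling pattern (FindUser and the models package are hypothetical stand-ins for the generated code):

package app

import (
	"database/sql"

	"example.com/app/models" // hypothetical package holding the generated code
)

// getUser shows the caller-side pattern: the generated Find now returns
// sql.ErrNoRows directly when nothing matches.
func getUser(db *sql.DB, id int64) (*models.User, error) {
	user, err := models.FindUser(db, id)
	if err == sql.ErrNoRows {
		return nil, nil // not found; the caller decides whether that is fatal
	}
	if err != nil {
		return nil, err
	}
	return user, nil
}
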
diff --git a/templates/15_insert.tpl b/templates/15_insert.tpl
index 20905ac..adf5f72 100644
--- a/templates/15_insert.tpl
+++ b/templates/15_insert.tpl
@@ -10,7 +10,7 @@ func (o *{{$tableNameSingular}}) InsertG(whitelist ... string) error {
 // behavior description.
 func (o *{{$tableNameSingular}}) InsertGP(whitelist ... string) {
 	if err := o.Insert(boil.GetDB(), whitelist...); err != nil {
-		panic(errors.Err(err))
+		panic(boil.WrapErr(err))
 	}
 }
 
@@ -18,7 +18,7 @@ func (o *{{$tableNameSingular}}) InsertGP(whitelist ... string) {
 // for whitelist behavior description.
 func (o *{{$tableNameSingular}}) InsertP(exec boil.Executor, whitelist ... string) {
 	if err := o.Insert(exec, whitelist...); err != nil {
-		panic(errors.Err(err))
+		panic(boil.WrapErr(err))
 	}
 }
 
@@ -29,7 +29,7 @@ func (o *{{$tableNameSingular}}) InsertP(exec boil.Executor, whitelist ... strin
 // - All columns with a default, but non-zero are included (i.e. health = 75)
 func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string) error {
 	if o == nil {
-		return errors.Err("{{.PkgName}}: no {{.Table.Name}} provided for insertion")
+		return errors.New("{{.PkgName}}: no {{.Table.Name}} provided for insertion")
 	}
 
 	var err error
@@ -37,7 +37,7 @@ func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string
 
 	{{if not .NoHooks -}}
 	if err := o.doBeforeInsertHooks(exec); err != nil {
-		return errors.Err(err)
+		return err
 	}
 	{{- end}}
 
@@ -59,39 +59,21 @@ func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string
 
 		cache.valueMapping, err = queries.BindMapping({{$varNameSingular}}Type, {{$varNameSingular}}Mapping, wl)
 		if err != nil {
-			return errors.Err(err)
+			return err
 		}
 		cache.retMapping, err = queries.BindMapping({{$varNameSingular}}Type, {{$varNameSingular}}Mapping, returnColumns)
 		if err != nil {
-			return errors.Err(err)
+			return err
 		}
-		if len(wl) != 0 {
-			cache.query = fmt.Sprintf("INSERT INTO {{$schemaTable}} ({{.LQ}}%s{{.RQ}}) %%sVALUES (%s)%%s", strings.Join(wl, "{{.RQ}},{{.LQ}}"), strmangle.Placeholders(dialect.IndexPlaceholders, len(wl), 1, 1))
-		} else {
-			{{if eq .DriverName "mysql" -}}
-			cache.query = "INSERT INTO {{$schemaTable}} () VALUES ()"
-			{{else -}}
-			cache.query = "INSERT INTO {{$schemaTable}} DEFAULT VALUES"
-			{{end -}}
-		}
-
-		var queryOutput, queryReturning string
+		cache.query = fmt.Sprintf("INSERT INTO {{$schemaTable}} ({{.LQ}}%s{{.RQ}}) VALUES (%s)", strings.Join(wl, "{{.LQ}},{{.RQ}}"), strmangle.Placeholders(dialect.IndexPlaceholders, len(wl), 1, 1))
 
 		if len(cache.retMapping) != 0 {
 			{{if .UseLastInsertID -}}
-			cache.retQuery = fmt.Sprintf("SELECT {{.LQ}}%s{{.RQ}} FROM {{$schemaTable}} WHERE %s", strings.Join(returnColumns, "{{.RQ}},{{.LQ}}"), strmangle.WhereClause("{{.LQ}}", "{{.RQ}}", {{if .Dialect.IndexPlaceholders}}1{{else}}0{{end}}, {{$varNameSingular}}PrimaryKeyColumns))
+			cache.retQuery = fmt.Sprintf("SELECT {{.LQ}}%s{{.RQ}} FROM {{$schemaTable}} WHERE %s", strings.Join(returnColumns, "{{.LQ}},{{.RQ}}"), strmangle.WhereClause("{{.LQ}}", "{{.RQ}}", {{if .Dialect.IndexPlaceholders}}1{{else}}0{{end}}, {{$varNameSingular}}PrimaryKeyColumns))
 			{{else -}}
-				{{if ne .DriverName "mssql" -}}
-			queryReturning = fmt.Sprintf(" RETURNING {{.LQ}}%s{{.RQ}}", strings.Join(returnColumns, "{{.RQ}},{{.LQ}}"))
-				{{else -}}
-			queryOutput = fmt.Sprintf("OUTPUT INSERTED.{{.LQ}}%s{{.RQ}} ", strings.Join(returnColumns, "{{.RQ}},INSERTED.{{.LQ}}"))
-				{{end -}}
+			cache.query += fmt.Sprintf(" RETURNING {{.LQ}}%s{{.RQ}}", strings.Join(returnColumns, "{{.LQ}},{{.RQ}}"))
 			{{end -}}
 		}
-
-		if len(wl) != 0 {
-			cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)
-		}
 	}
 
 	value := reflect.Indirect(reflect.ValueOf(o))
@@ -110,7 +92,7 @@ func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string
 	_, err = exec.Exec(cache.query, vals...)
 	{{- end}}
 	if err != nil {
-		return errors.Prefix("{{.PkgName}}: unable to insert into {{.Table.Name}}", err)
+		return errors.Wrap(err, "{{.PkgName}}: unable to insert into {{.Table.Name}}")
 	}
 	
 	{{if $canLastInsertID -}}
@@ -125,7 +107,7 @@ func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string
 	{{if $canLastInsertID -}}
 	lastID, err = result.LastInsertId()
 	if err != nil {
-		return errors.Err(ErrSyncFail)
+		return ErrSyncFail
 	}
 
 	{{$colName := index .Table.PKey.Columns 0 -}}
@@ -150,7 +132,7 @@ func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string
 
 	err = exec.QueryRow(cache.retQuery, identifierCols...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
 	if err != nil {
-		return errors.Prefix("{{.PkgName}}: unable to populate default values for {{.Table.Name}}", err)
+		return errors.Wrap(err, "{{.PkgName}}: unable to populate default values for {{.Table.Name}}")
 	}
 	{{else}}
 	if len(cache.retMapping) != 0 {
@@ -160,7 +142,7 @@ func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string
 	}
 
 	if err != nil {
-		return errors.Prefix("{{.PkgName}}: unable to insert into {{.Table.Name}}", err)
+		return errors.Wrap(err, "{{.PkgName}}: unable to insert into {{.Table.Name}}")
 	}
 	{{end}}
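
The insert builder now assumes a non-empty column list and appends RETURNING directly, instead of threading OUTPUT/DEFAULT VALUES variants through a second Sprintf pass. A rough sketch of the string it ends up with for a hypothetical three-column Postgres table (quote characters, column names, and the strmangle import path are assumptions):

package main

import (
	"fmt"
	"strings"

	"github.com/vattle/sqlboiler/strmangle" // import path assumed
)

func main() {
	wl := []string{"id", "name", "created_at"}    // illustrative whitelist
	returnColumns := []string{"id", "created_at"} // illustrative defaulted columns

	query := fmt.Sprintf(
		`INSERT INTO "users" ("%s") VALUES (%s)`,
		strings.Join(wl, `","`),
		strmangle.Placeholders(true, len(wl), 1, 1),
	)
	query += fmt.Sprintf(` RETURNING "%s"`, strings.Join(returnColumns, `","`))

	fmt.Println(query)
	// Expected shape:
	// INSERT INTO "users" ("id","name","created_at") VALUES ($1,$2,$3) RETURNING "id","created_at"
}
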
 
diff --git a/templates/16_update.tpl b/templates/16_update.tpl
index a4a688c..0f5dad5 100644
--- a/templates/16_update.tpl
+++ b/templates/16_update.tpl
@@ -12,7 +12,7 @@ func (o *{{$tableNameSingular}}) UpdateG(whitelist ...string) error {
 // Panics on error. See Update for whitelist behavior description.
 func (o *{{$tableNameSingular}}) UpdateGP(whitelist ...string) {
 	if err := o.Update(boil.GetDB(), whitelist...); err != nil {
-		panic(errors.Err(err))
+		panic(boil.WrapErr(err))
 	}
 }
 
@@ -21,7 +21,7 @@ func (o *{{$tableNameSingular}}) UpdateGP(whitelist ...string) {
 func (o *{{$tableNameSingular}}) UpdateP(exec boil.Executor, whitelist ... string) {
 	err := o.Update(exec, whitelist...)
 	if err != nil {
-		panic(errors.Err(err))
+		panic(boil.WrapErr(err))
 	}
 }
 
@@ -38,7 +38,7 @@ func (o *{{$tableNameSingular}}) Update(exec boil.Executor, whitelist ... string
 	var err error
 	{{if not .NoHooks -}}
 	if err = o.doBeforeUpdateHooks(exec); err != nil {
-		return errors.Err(err)
+		return err
 	}
 	{{end -}}
 
@@ -48,19 +48,9 @@ func (o *{{$tableNameSingular}}) Update(exec boil.Executor, whitelist ... string
 	{{$varNameSingular}}UpdateCacheMut.RUnlock()
 
 	if !cached {
-		wl := strmangle.UpdateColumnSet(
-			{{$varNameSingular}}Columns,
-			{{$varNameSingular}}PrimaryKeyColumns,
-			whitelist,
-		)
-		{{if eq .DriverName "mssql"}}
-		wl = strmangle.SetComplement(wl, {{$varNameSingular}}ColumnsWithAuto)
-		{{end}}
-		if len(whitelist) == 0 {
-			wl = strmangle.SetComplement(wl, []string{"created_at","updated_at"})
-		}
+		wl := strmangle.UpdateColumnSet({{$varNameSingular}}Columns, {{$varNameSingular}}PrimaryKeyColumns, whitelist)
 		if len(wl) == 0 {
-			return errors.Err("{{.PkgName}}: unable to update {{.Table.Name}}, could not build whitelist")
+			return errors.New("{{.PkgName}}: unable to update {{.Table.Name}}, could not build whitelist")
 		}
 
 		cache.query = fmt.Sprintf("UPDATE {{$schemaTable}} SET %s WHERE %s",
@@ -69,7 +59,7 @@ func (o *{{$tableNameSingular}}) Update(exec boil.Executor, whitelist ... string
 		)
 		cache.valueMapping, err = queries.BindMapping({{$varNameSingular}}Type, {{$varNameSingular}}Mapping, append(wl, {{$varNameSingular}}PrimaryKeyColumns...))
 		if err != nil {
-			return errors.Err(err)
+			return err
 		}
 	}
 
@@ -82,7 +72,7 @@ func (o *{{$tableNameSingular}}) Update(exec boil.Executor, whitelist ... string
 
 	_, err = exec.Exec(cache.query, values...)
 	if err != nil {
-		return errors.Prefix("{{.PkgName}}: unable to update {{.Table.Name}} row", err)
+		return errors.Wrap(err, "{{.PkgName}}: unable to update {{.Table.Name}} row")
 	}
 
 	if !cached {
@@ -99,19 +89,19 @@ func (o *{{$tableNameSingular}}) Update(exec boil.Executor, whitelist ... string
 }
 
 // UpdateAllP updates all rows with matching column names, and panics on error.
-func (q {{$tableNameSingular}}Query) UpdateAllP(cols M) {
+func (q {{$varNameSingular}}Query) UpdateAllP(cols M) {
 	if err := q.UpdateAll(cols); err != nil {
-		panic(errors.Err(err))
+		panic(boil.WrapErr(err))
 	}
 }
 
 // UpdateAll updates all rows with the specified column values.
-func (q {{$tableNameSingular}}Query) UpdateAll(cols M) error {
+func (q {{$varNameSingular}}Query) UpdateAll(cols M) error {
 	queries.SetUpdate(q.Query, cols)
 
 	_, err := q.Query.Exec()
 	if err != nil {
-		return errors.Prefix("{{.PkgName}}: unable to update all for {{.Table.Name}}", err)
+		return errors.Wrap(err, "{{.PkgName}}: unable to update all for {{.Table.Name}}")
 	}
 
 	return nil
@@ -125,14 +115,14 @@ func (o {{$tableNameSingular}}Slice) UpdateAllG(cols M) error {
 // UpdateAllGP updates all rows with the specified column values, and panics on error.
 func (o {{$tableNameSingular}}Slice) UpdateAllGP(cols M) {
 	if err := o.UpdateAll(boil.GetDB(), cols); err != nil {
-		panic(errors.Err(err))
+		panic(boil.WrapErr(err))
 	}
 }
 
 // UpdateAllP updates all rows with the specified column values, and panics on error.
 func (o {{$tableNameSingular}}Slice) UpdateAllP(exec boil.Executor, cols M) {
 	if err := o.UpdateAll(exec, cols); err != nil {
-		panic(errors.Err(err))
+		panic(boil.WrapErr(err))
 	}
 }
 
@@ -144,7 +134,7 @@ func (o {{$tableNameSingular}}Slice) UpdateAll(exec boil.Executor, cols M) error
 	}
 
 	if len(cols) == 0 {
-		return errors.Err("{{.PkgName}}: update all requires at least one column argument")
+		return errors.New("{{.PkgName}}: update all requires at least one column argument")
 	}
 
 	colNames := make([]string, len(cols))
@@ -162,10 +152,12 @@ func (o {{$tableNameSingular}}Slice) UpdateAll(exec boil.Executor, cols M) error
 		pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), {{$varNameSingular}}PrimaryKeyMapping)
 		args = append(args, pkeyArgs...)
 	}
-	
-	sql := fmt.Sprintf("UPDATE {{$schemaTable}} SET %s WHERE %s",
+
+	sql := fmt.Sprintf(
+		"UPDATE {{$schemaTable}} SET %s WHERE ({{.LQ}}{{.Table.PKey.Columns | join (printf "%s,%s" .LQ .RQ)}}{{.RQ}}) IN (%s)",
 		strmangle.SetParamNames("{{.LQ}}", "{{.RQ}}", {{if .Dialect.IndexPlaceholders}}1{{else}}0{{end}}, colNames),
-		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), {{if .Dialect.IndexPlaceholders}}len(colNames)+1{{else}}0{{end}}, {{$varNameSingular}}PrimaryKeyColumns, len(o)))
+		strmangle.Placeholders(dialect.IndexPlaceholders, len(o) * len({{$varNameSingular}}PrimaryKeyColumns), len(colNames)+1, len({{$varNameSingular}}PrimaryKeyColumns)),
+	)
 
 	if boil.DebugMode {
 		fmt.Fprintln(boil.DebugWriter, sql)
@@ -174,7 +166,7 @@ func (o {{$tableNameSingular}}Slice) UpdateAll(exec boil.Executor, cols M) error
 
 	_, err := exec.Exec(sql, args...)
 	if err != nil {
-		return errors.Prefix("{{.PkgName}}: unable to update all in {{$varNameSingular}} slice", err)
+		return errors.Wrap(err, "{{.PkgName}}: unable to update all in {{$varNameSingular}} slice")
 	}
 
 	return nil
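
UpdateAll on a slice (and the slice DeleteAll/ReloadAll further down) now matches rows with a composite (primary key columns) IN (...) clause built from grouped placeholders, rather than a repeated WHERE clause. A sketch of the grouped form for a two-column key and three objects; the table, columns, and exact placeholder grouping are best-effort assumptions about strmangle.Placeholders:

package main

import (
	"fmt"

	"github.com/vattle/sqlboiler/strmangle" // import path assumed
)

func main() {
	pkCols := 2  // a composite (user_id, role_id) key, purely illustrative
	objects := 3 // number of rows updated in one statement

	// count = objects*pkCols placeholders, starting at $2 because $1 is taken
	// by the single SET parameter, grouped pkCols at a time.
	in := strmangle.Placeholders(true, objects*pkCols, 2, pkCols)
	fmt.Println(`UPDATE "user_roles" SET "status"=$1 WHERE ("user_id","role_id") IN (` + in + `)`)
	// Expected shape: ... IN (($2,$3),($4,$5),($6,$7))
}
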
diff --git a/templates/17_upsert.tpl b/templates/17_upsert.tpl
index b713243..f9f0f55 100644
--- a/templates/17_upsert.tpl
+++ b/templates/17_upsert.tpl
@@ -2,36 +2,36 @@
 {{- $varNameSingular := .Table.Name | singular | camelCase -}}
 {{- $schemaTable := .Table.Name | .SchemaTable}}
 // UpsertG attempts an insert, and does an update or ignore on conflict.
-func (o *{{$tableNameSingular}}) UpsertG({{if eq .DriverName "postgres"}}updateOnConflict bool, conflictColumns []string, {{end}}updateColumns []string,	whitelist ...string) error {
-	return o.Upsert(boil.GetDB(), {{if eq .DriverName "postgres"}}updateOnConflict, conflictColumns, {{end}}updateColumns, whitelist...)
+func (o *{{$tableNameSingular}}) UpsertG({{if ne .DriverName "mysql"}}updateOnConflict bool, conflictColumns []string, {{end}}updateColumns []string,	whitelist ...string) error {
+	return o.Upsert(boil.GetDB(), {{if ne .DriverName "mysql"}}updateOnConflict, conflictColumns, {{end}}updateColumns, whitelist...)
 }
 
 // UpsertGP attempts an insert, and does an update or ignore on conflict. Panics on error.
-func (o *{{$tableNameSingular}}) UpsertGP({{if eq .DriverName "postgres"}}updateOnConflict bool, conflictColumns []string, {{end}}updateColumns []string,	whitelist ...string) {
-	if err := o.Upsert(boil.GetDB(), {{if eq .DriverName "postgres"}}updateOnConflict, conflictColumns, {{end}}updateColumns, whitelist...); err != nil {
-		panic(errors.Err(err))
+func (o *{{$tableNameSingular}}) UpsertGP({{if ne .DriverName "mysql"}}updateOnConflict bool, conflictColumns []string, {{end}}updateColumns []string,	whitelist ...string) {
+	if err := o.Upsert(boil.GetDB(), {{if ne .DriverName "mysql"}}updateOnConflict, conflictColumns, {{end}}updateColumns, whitelist...); err != nil {
+		panic(boil.WrapErr(err))
 	}
 }
 
 // UpsertP attempts an insert using an executor, and does an update or ignore on conflict.
 // UpsertP panics on error.
-func (o *{{$tableNameSingular}}) UpsertP(exec boil.Executor, {{if eq .DriverName "postgres"}}updateOnConflict bool, conflictColumns []string, {{end}}updateColumns []string,	whitelist ...string) {
-	if err := o.Upsert(exec, {{if eq .DriverName "postgres"}}updateOnConflict, conflictColumns, {{end}}updateColumns, whitelist...); err != nil {
-		panic(errors.Err(err))
+func (o *{{$tableNameSingular}}) UpsertP(exec boil.Executor, {{if ne .DriverName "mysql"}}updateOnConflict bool, conflictColumns []string, {{end}}updateColumns []string,	whitelist ...string) {
+	if err := o.Upsert(exec, {{if ne .DriverName "mysql"}}updateOnConflict, conflictColumns, {{end}}updateColumns, whitelist...); err != nil {
+		panic(boil.WrapErr(err))
 	}
 }
 
 // Upsert attempts an insert using an executor, and does an update or ignore on conflict.
-func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if eq .DriverName "postgres"}}updateOnConflict bool, conflictColumns []string, {{end}}updateColumns []string, whitelist ...string) error {
+func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if ne .DriverName "mysql"}}updateOnConflict bool, conflictColumns []string, {{end}}updateColumns []string, whitelist ...string) error {
 	if o == nil {
-		return errors.Err("{{.PkgName}}: no {{.Table.Name}} provided for upsert")
+		return errors.New("{{.PkgName}}: no {{.Table.Name}} provided for upsert")
 	}
 
 	{{- template "timestamp_upsert_helper" . }}
 
 	{{if not .NoHooks -}}
 	if err := o.doBeforeUpsertHooks(exec); err != nil {
-		return errors.Err(err)
+		return err
 	}
 	{{- end}}
 
@@ -39,7 +39,7 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if eq .DriverName
 
 	// Build cache key in-line uglily - mysql vs postgres problems
 	buf := strmangle.GetBuffer()
-	{{if eq .DriverName "postgres"}}
+	{{if ne .DriverName "mysql" -}}
 	if updateOnConflict {
 		buf.WriteByte('t')
 	} else {
@@ -72,69 +72,46 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if eq .DriverName
 	var err error
 
 	if !cached {
-		insert, ret := strmangle.InsertColumnSet(
+		var ret []string
+		whitelist, ret = strmangle.InsertColumnSet(
 			{{$varNameSingular}}Columns,
 			{{$varNameSingular}}ColumnsWithDefault,
 			{{$varNameSingular}}ColumnsWithoutDefault,
 			nzDefaults,
 			whitelist,
 		)
-		{{if eq .DriverName "mssql" -}}
-		insert = strmangle.SetComplement(insert, {{$varNameSingular}}ColumnsWithAuto)
-		for i, v := range insert {
-			if strmangle.ContainsAny({{$varNameSingular}}PrimaryKeyColumns, v) && strmangle.ContainsAny({{$varNameSingular}}ColumnsWithDefault, v) {
-				insert = append(insert[:i], insert[i+1:]...)
-			}
-		}
-		if len(insert) == 0 {
-			return errors.Err("{{.PkgName}}: unable to upsert {{.Table.Name}}, could not build insert column list")
-		}
-
-		ret = strmangle.SetMerge(ret, {{$varNameSingular}}ColumnsWithAuto)
-		ret = strmangle.SetMerge(ret, {{$varNameSingular}}ColumnsWithDefault)
-
-		{{end}}
 		update := strmangle.UpdateColumnSet(
 			{{$varNameSingular}}Columns,
 			{{$varNameSingular}}PrimaryKeyColumns,
 			updateColumns,
 		)
-		{{if eq .DriverName "mssql" -}}
-		update = strmangle.SetComplement(update, {{$varNameSingular}}ColumnsWithAuto)
-		{{end -}}
-
 		if len(update) == 0 {
-			return errors.Err("{{.PkgName}}: unable to upsert {{.Table.Name}}, could not build update column list")
+			return errors.New("{{.PkgName}}: unable to upsert {{.Table.Name}}, could not build update column list")
 		}
 
-		{{if eq .DriverName "postgres"}}
+		{{if ne .DriverName "mysql" -}}
 		conflict := conflictColumns
 		if len(conflict) == 0 {
 			conflict = make([]string, len({{$varNameSingular}}PrimaryKeyColumns))
 			copy(conflict, {{$varNameSingular}}PrimaryKeyColumns)
 		}
-		cache.query = queries.BuildUpsertQueryPostgres(dialect, "{{$schemaTable}}", updateOnConflict, ret, update, conflict, insert)
-		{{else if eq .DriverName "mysql"}}
-		cache.query = queries.BuildUpsertQueryMySQL(dialect, "{{.Table.Name}}", update, insert, {{$varNameSingular}}AutoIncrementColumn)
+		cache.query = queries.BuildUpsertQueryPostgres(dialect, "{{$schemaTable}}", updateOnConflict, ret, update, conflict, whitelist)
+		{{- else -}}
+		cache.query = queries.BuildUpsertQueryMySQL(dialect, "{{.Table.Name}}", update, whitelist)
 		cache.retQuery = fmt.Sprintf(
 			"SELECT %s FROM {{.LQ}}{{.Table.Name}}{{.RQ}} WHERE {{whereClause .LQ .RQ 0 .Table.PKey.Columns}}",
 			strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, ret), ","),
 		)
-		{{else if eq .DriverName "mssql"}}
-		cache.query = queries.BuildUpsertQueryMSSQL(dialect, "{{.Table.Name}}", {{$varNameSingular}}PrimaryKeyColumns, update, insert, ret)
-
-		whitelist = append({{$varNameSingular}}PrimaryKeyColumns, update...)
-		whitelist = append(whitelist, insert...)
 		{{- end}}
 
-		cache.valueMapping, err = queries.BindMapping({{$varNameSingular}}Type, {{$varNameSingular}}Mapping, {{if eq .DriverName "mssql"}}whitelist{{else}}insert{{end}})
+		cache.valueMapping, err = queries.BindMapping({{$varNameSingular}}Type, {{$varNameSingular}}Mapping, whitelist)
 		if err != nil {
-			return errors.Err(err)
+			return err
 		}
 		if len(ret) != 0 {
 			cache.retMapping, err = queries.BindMapping({{$varNameSingular}}Type, {{$varNameSingular}}Mapping, ret)
 			if err != nil {
-				return errors.Err(err)
+				return err
 			}
 		}
 	}
@@ -159,7 +136,7 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if eq .DriverName
 	_, err = exec.Exec(cache.query, vals...)
 	{{- end}}
 	if err != nil {
-		return errors.Prefix("{{.PkgName}}: unable to upsert for {{.Table.Name}}", err)
+		return errors.Wrap(err, "{{.PkgName}}: unable to upsert for {{.Table.Name}}")
 	}
 
 	{{if $canLastInsertID -}}
@@ -174,7 +151,7 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if eq .DriverName
 	{{if $canLastInsertID -}}
 	lastID, err = result.LastInsertId()
 	if err != nil {
-		return errors.Err(ErrSyncFail)
+		return ErrSyncFail
 	}
 
 	{{$colName := index .Table.PKey.Columns 0 -}}
@@ -199,19 +176,16 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if eq .DriverName
 
 	err = exec.QueryRow(cache.retQuery, identifierCols...).Scan(returns...)
 	if err != nil {
-		return errors.Prefix("{{.PkgName}}: unable to populate default values for {{.Table.Name}}", err)
+		return errors.Wrap(err, "{{.PkgName}}: unable to populate default values for {{.Table.Name}}")
 	}
 	{{- else}}
 	if len(cache.retMapping) != 0 {
 		err = exec.QueryRow(cache.query, vals...).Scan(returns...)
-		if err == sql.ErrNoRows {
-			err = nil // Postgres doesn't return anything when there's no update
-		}
 	} else {
 		_, err = exec.Exec(cache.query, vals...)
 	}
 	if err != nil {
-		return errors.Prefix("{{.PkgName}}: unable to upsert {{.Table.Name}}", err)
+		return errors.Wrap(err, "{{.PkgName}}: unable to upsert for {{.Table.Name}}")
 	}
 	{{- end}}
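
The upsert path now only distinguishes MySQL from everything else: non-MySQL drivers go through queries.BuildUpsertQueryPostgres and an ON CONFLICT clause, while MySQL uses queries.BuildUpsertQueryMySQL plus the cached retQuery SELECT to read defaulted columns back. An illustrative sketch of the Postgres-style statement this produces when updateOnConflict is true (table, columns, and formatting are hypothetical, not generator output):

package models

// exampleUpsert illustrates the shape of the query built for non-MySQL
// drivers; with updateOnConflict=false the conflict action becomes DO NOTHING.
const exampleUpsert = `
INSERT INTO "users" ("id", "name", "updated_at")
VALUES ($1, $2, $3)
ON CONFLICT ("id")
DO UPDATE SET "name" = EXCLUDED."name", "updated_at" = EXCLUDED."updated_at"
RETURNING "id", "created_at"`
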
 
diff --git a/templates/18_delete.tpl b/templates/18_delete.tpl
index 1ab82a3..c34822e 100644
--- a/templates/18_delete.tpl
+++ b/templates/18_delete.tpl
@@ -6,7 +6,7 @@
 // Panics on error.
 func (o *{{$tableNameSingular}}) DeleteP(exec boil.Executor) {
 	if err := o.Delete(exec); err != nil {
-	panic(errors.Err(err))
+	panic(boil.WrapErr(err))
 	}
 }
 
@@ -14,7 +14,7 @@ func (o *{{$tableNameSingular}}) DeleteP(exec boil.Executor) {
 // DeleteG will match against the primary key column to find the record to delete.
 func (o *{{$tableNameSingular}}) DeleteG() error {
 	if o == nil {
-	  return errors.Err("{{.PkgName}}: no {{$tableNameSingular}} provided for deletion")
+	return errors.New("{{.PkgName}}: no {{$tableNameSingular}} provided for deletion")
 	}
 
 	return o.Delete(boil.GetDB())
@@ -25,7 +25,7 @@ func (o *{{$tableNameSingular}}) DeleteG() error {
 // Panics on error.
 func (o *{{$tableNameSingular}}) DeleteGP() {
 	if err := o.DeleteG(); err != nil {
-	  panic(errors.Err(err))
+	panic(boil.WrapErr(err))
 	}
 }
 
@@ -33,12 +33,12 @@ func (o *{{$tableNameSingular}}) DeleteGP() {
 // Delete will match against the primary key column to find the record to delete.
 func (o *{{$tableNameSingular}}) Delete(exec boil.Executor) error {
 	if o == nil {
-	  return errors.Err("{{.PkgName}}: no {{$tableNameSingular}} provided for delete")
+	return errors.New("{{.PkgName}}: no {{$tableNameSingular}} provided for delete")
 	}
 
 	{{if not .NoHooks -}}
 	if err := o.doBeforeDeleteHooks(exec); err != nil {
-	  return errors.Err(err)
+	return err
 	}
 	{{- end}}
 
@@ -52,12 +52,12 @@ func (o *{{$tableNameSingular}}) Delete(exec boil.Executor) error {
 
 	_, err := exec.Exec(sql, args...)
 	if err != nil {
-  	return errors.Prefix("{{.PkgName}}: unable to delete from {{.Table.Name}}", err)
+	return errors.Wrap(err, "{{.PkgName}}: unable to delete from {{.Table.Name}}")
 	}
 
 	{{if not .NoHooks -}}
 	if err := o.doAfterDeleteHooks(exec); err != nil {
-	  return errors.Err(err)
+	return err
 	}
 	{{- end}}
 
@@ -65,23 +65,23 @@ func (o *{{$tableNameSingular}}) Delete(exec boil.Executor) error {
 }
 
 // DeleteAllP deletes all rows, and panics on error.
-func (q {{$tableNameSingular}}Query) DeleteAllP() {
+func (q {{$varNameSingular}}Query) DeleteAllP() {
 	if err := q.DeleteAll(); err != nil {
-	  panic(errors.Err(err))
+	panic(boil.WrapErr(err))
 	}
 }
 
 // DeleteAll deletes all matching rows.
-func (q {{$tableNameSingular}}Query) DeleteAll() error {
+func (q {{$varNameSingular}}Query) DeleteAll() error {
 	if q.Query == nil {
-	  return errors.Err("{{.PkgName}}: no {{$tableNameSingular}}Query provided for delete all")
+	return errors.New("{{.PkgName}}: no {{$varNameSingular}}Query provided for delete all")
 	}
 
 	queries.SetDelete(q.Query)
 
 	_, err := q.Query.Exec()
 	if err != nil {
-	  return errors.Prefix("{{.PkgName}}: unable to delete all from {{.Table.Name}}", err)
+	return errors.Wrap(err, "{{.PkgName}}: unable to delete all from {{.Table.Name}}")
 	}
 
 	return nil
@@ -90,14 +90,14 @@ func (q {{$tableNameSingular}}Query) DeleteAll() error {
 // DeleteAllGP deletes all rows in the slice, and panics on error.
 func (o {{$tableNameSingular}}Slice) DeleteAllGP() {
 	if err := o.DeleteAllG(); err != nil {
-	  panic(errors.Err(err))
+	panic(boil.WrapErr(err))
 	}
 }
 
 // DeleteAllG deletes all rows in the slice.
 func (o {{$tableNameSingular}}Slice) DeleteAllG() error {
 	if o == nil {
-	  return errors.Err("{{.PkgName}}: no {{$tableNameSingular}} slice provided for delete all")
+	return errors.New("{{.PkgName}}: no {{$tableNameSingular}} slice provided for delete all")
 	}
 	return o.DeleteAll(boil.GetDB())
 }
@@ -105,14 +105,14 @@ func (o {{$tableNameSingular}}Slice) DeleteAllG() error {
 // DeleteAllP deletes all rows in the slice, using an executor, and panics on error.
 func (o {{$tableNameSingular}}Slice) DeleteAllP(exec boil.Executor) {
 	if err := o.DeleteAll(exec); err != nil {
-	  panic(errors.Err(err))
+	panic(boil.WrapErr(err))
 	}
 }
 
 // DeleteAll deletes all rows in the slice, using an executor.
 func (o {{$tableNameSingular}}Slice) DeleteAll(exec boil.Executor) error {
 	if o == nil {
-		return errors.Err("{{.PkgName}}: no {{$tableNameSingular}} slice provided for delete all")
+		return errors.New("{{.PkgName}}: no {{$tableNameSingular}} slice provided for delete all")
 	}
 
 	if len(o) == 0 {
@@ -123,7 +123,7 @@ func (o {{$tableNameSingular}}Slice) DeleteAll(exec boil.Executor) error {
 	if len({{$varNameSingular}}BeforeDeleteHooks) != 0 {
 		for _, obj := range o {
 			if err := obj.doBeforeDeleteHooks(exec); err != nil {
-				return errors.Err(err)
+				return err
 			}
 		}
 	}
@@ -135,8 +135,11 @@ func (o {{$tableNameSingular}}Slice) DeleteAll(exec boil.Executor) error {
 		args = append(args, pkeyArgs...)
 	}
 
-	sql := "DELETE FROM {{$schemaTable}} WHERE " +
-		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), {{if .Dialect.IndexPlaceholders}}1{{else}}0{{end}}, {{$varNameSingular}}PrimaryKeyColumns, len(o))
+	sql := fmt.Sprintf(
+		"DELETE FROM {{$schemaTable}} WHERE (%s) IN (%s)",
+		strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, {{$varNameSingular}}PrimaryKeyColumns), ","),
+		strmangle.Placeholders(dialect.IndexPlaceholders, len(o) * len({{$varNameSingular}}PrimaryKeyColumns), 1, len({{$varNameSingular}}PrimaryKeyColumns)),
+	)
 
 	if boil.DebugMode {
 		fmt.Fprintln(boil.DebugWriter, sql)
@@ -145,14 +148,14 @@ func (o {{$tableNameSingular}}Slice) DeleteAll(exec boil.Executor) error {
 
 	_, err := exec.Exec(sql, args...)
 	if err != nil {
-		return errors.Prefix("{{.PkgName}}: unable to delete all from {{$varNameSingular}} slice", err)
+		return errors.Wrap(err, "{{.PkgName}}: unable to delete all from {{$varNameSingular}} slice")
 	}
 
 	{{if not .NoHooks -}}
 	if len({{$varNameSingular}}AfterDeleteHooks) != 0 {
 		for _, obj := range o {
 			if err := obj.doAfterDeleteHooks(exec); err != nil {
-				return errors.Err(err)
+				return err
 			}
 		}
 	}
diff --git a/templates/19_reload.tpl b/templates/19_reload.tpl
index c7c5273..45c0e61 100644
--- a/templates/19_reload.tpl
+++ b/templates/19_reload.tpl
@@ -5,21 +5,21 @@
 // ReloadGP refetches the object from the database and panics on error.
 func (o *{{$tableNameSingular}}) ReloadGP() {
 	if err := o.ReloadG(); err != nil {
-		panic(errors.Err(err))
+		panic(boil.WrapErr(err))
 	}
 }
 
 // ReloadP refetches the object from the database with an executor. Panics on error.
 func (o *{{$tableNameSingular}}) ReloadP(exec boil.Executor) {
 	if err := o.Reload(exec); err != nil {
-		panic(errors.Err(err))
+		panic(boil.WrapErr(err))
 	}
 }
 
 // ReloadG refetches the object from the database using the primary keys.
 func (o *{{$tableNameSingular}}) ReloadG() error {
 	if o == nil {
-		return errors.Err("{{.PkgName}}: no {{$tableNameSingular}} provided for reload")
+		return errors.New("{{.PkgName}}: no {{$tableNameSingular}} provided for reload")
 	}
 
 	return o.Reload(boil.GetDB())
@@ -30,7 +30,7 @@ func (o *{{$tableNameSingular}}) ReloadG() error {
 func (o *{{$tableNameSingular}}) Reload(exec boil.Executor) error {
 	ret, err := Find{{$tableNameSingular}}(exec, {{.Table.PKey.Columns | stringMap .StringFuncs.titleCase | prefixStringSlice "o." | join ", "}})
 	if err != nil {
-		return errors.Err(err)
+		return err
 	}
 
 	*o = *ret
@@ -42,7 +42,7 @@ func (o *{{$tableNameSingular}}) Reload(exec boil.Executor) error {
 // Panics on error.
 func (o *{{$tableNameSingular}}Slice) ReloadAllGP() {
 	if err := o.ReloadAllG(); err != nil {
-		panic(errors.Err(err))
+		panic(boil.WrapErr(err))
 	}
 }
 
@@ -51,7 +51,7 @@ func (o *{{$tableNameSingular}}Slice) ReloadAllGP() {
 // Panics on error.
 func (o *{{$tableNameSingular}}Slice) ReloadAllP(exec boil.Executor) {
 	if err := o.ReloadAll(exec); err != nil {
-		panic(errors.Err(err))
+		panic(boil.WrapErr(err))
 	}
 }
 
@@ -59,7 +59,7 @@ func (o *{{$tableNameSingular}}Slice) ReloadAllP(exec boil.Executor) {
 // and overwrites the original object slice with the newly updated slice.
 func (o *{{$tableNameSingular}}Slice) ReloadAllG() error {
 	if o == nil {
-		return errors.Err("{{.PkgName}}: empty {{$tableNameSingular}}Slice provided for reload all")
+		return errors.New("{{.PkgName}}: empty {{$tableNameSingular}}Slice provided for reload all")
 	}
 
 	return o.ReloadAll(boil.GetDB())
@@ -79,14 +79,17 @@ func (o *{{$tableNameSingular}}Slice) ReloadAll(exec boil.Executor) error {
 		args = append(args, pkeyArgs...)
 	}
 
-	sql := "SELECT {{$schemaTable}}.* FROM {{$schemaTable}} WHERE " +
-		strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), {{if .Dialect.IndexPlaceholders}}1{{else}}0{{end}}, {{$varNameSingular}}PrimaryKeyColumns, len(*o))
+	sql := fmt.Sprintf(
+		"SELECT {{$schemaTable}}.* FROM {{$schemaTable}} WHERE (%s) IN (%s)",
+		strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, {{$varNameSingular}}PrimaryKeyColumns), ","),
+		strmangle.Placeholders(dialect.IndexPlaceholders, len(*o) * len({{$varNameSingular}}PrimaryKeyColumns), 1, len({{$varNameSingular}}PrimaryKeyColumns)),
+	)
 
 	q := queries.Raw(exec, sql, args...)
 
 	err := q.Bind(&{{$varNamePlural}})
 	if err != nil {
-		return errors.Prefix("{{.PkgName}}: unable to reload all in {{$tableNameSingular}}Slice", err)
+		return errors.Wrap(err, "{{.PkgName}}: unable to reload all in {{$tableNameSingular}}Slice")
 	}
 
 	*o = {{$varNamePlural}}
diff --git a/templates/20_exists.tpl b/templates/20_exists.tpl
index 2a36c23..2e768f1 100644
--- a/templates/20_exists.tpl
+++ b/templates/20_exists.tpl
@@ -1,17 +1,13 @@
 {{- $tableNameSingular := .Table.Name | singular | titleCase -}}
-{{- $varNameSingular := .Table.Name | singular | camelCase -}}
 {{- $colDefs := sqlColDefinitions .Table.Columns .Table.PKey.Columns -}}
-{{- $pkNames := $colDefs.Names | stringMap .StringFuncs.camelCase | stringMap .StringFuncs.replaceReserved -}}
+{{- $pkNames := $colDefs.Names | stringMap .StringFuncs.camelCase -}}
 {{- $pkArgs := joinSlices " " $pkNames $colDefs.Types | join ", " -}}
 {{- $schemaTable := .Table.Name | .SchemaTable}}
 // {{$tableNameSingular}}Exists checks if the {{$tableNameSingular}} row exists.
 func {{$tableNameSingular}}Exists(exec boil.Executor, {{$pkArgs}}) (bool, error) {
 	var exists bool
-	{{if eq .DriverName "mssql" -}}
-	sql := "select case when exists(select top(1) 1 from {{$schemaTable}} where {{if .Dialect.IndexPlaceholders}}{{whereClause .LQ .RQ 1 .Table.PKey.Columns}}{{else}}{{whereClause .LQ .RQ 0 .Table.PKey.Columns}}{{end}}) then 1 else 0 end"
-	{{- else -}}
+
 	sql := "select exists(select 1 from {{$schemaTable}} where {{if .Dialect.IndexPlaceholders}}{{whereClause .LQ .RQ 1 .Table.PKey.Columns}}{{else}}{{whereClause .LQ .RQ 0 .Table.PKey.Columns}}{{end}} limit 1)"
-	{{- end}}
 
 	if boil.DebugMode {
 		fmt.Fprintln(boil.DebugWriter, sql)
@@ -22,7 +18,7 @@ func {{$tableNameSingular}}Exists(exec boil.Executor, {{$pkArgs}}) (bool, error)
 
 	err := row.Scan(&exists)
 	if err != nil {
-		return false, errors.Prefix("{{.PkgName}}: unable to check if {{.Table.Name}} exists", err)
+		return false, errors.Wrap(err, "{{.PkgName}}: unable to check if {{.Table.Name}} exists")
 	}
 
 	return exists, nil
@@ -37,7 +33,7 @@ func {{$tableNameSingular}}ExistsG({{$pkArgs}}) (bool, error) {
 func {{$tableNameSingular}}ExistsGP({{$pkArgs}}) bool {
 	e, err := {{$tableNameSingular}}Exists(boil.GetDB(), {{$pkNames | join ", "}})
 	if err != nil {
-		panic(errors.Err(err))
+		panic(boil.WrapErr(err))
 	}
 
 	return e
@@ -47,43 +43,8 @@ func {{$tableNameSingular}}ExistsGP({{$pkArgs}}) bool {
 func {{$tableNameSingular}}ExistsP(exec boil.Executor, {{$pkArgs}}) bool {
 	e, err := {{$tableNameSingular}}Exists(exec, {{$pkNames | join ", "}})
 	if err != nil {
-		panic(errors.Err(err))
+		panic(boil.WrapErr(err))
 	}
 
 	return e
 }
-
-// IsNew() checks if record exists in db (aka if its primary key is set).
-func (o *{{$tableNameSingular}}) IsNew() bool {
-	r := reflect.ValueOf(o).Elem()
-	for i := 0; i < r.NumField(); i++ {
-		column := r.Type().Field(i).Tag.Get("boil")
-		for _, pkColumn := range {{$varNameSingular}}PrimaryKeyColumns {
-			if column == pkColumn {
-				field := r.Field(i)
-				if field.Interface() != reflect.Zero(field.Type()).Interface() {
-					return false
-				}
-			}
-		}
-	}
-	return true
-}
-
-// Save() inserts the record if it does not exist, or updates it if it does.
-func (o *{{$tableNameSingular}}) Save(exec boil.Executor, whitelist ...string) error {
-  if o.IsNew() {
-    return o.Insert(exec, whitelist...)
-  } else {
-    return o.Update(exec, whitelist...)
-  }
-}
-
-// SaveG() inserts the record if it does not exist, or updates it if it does.
-func (o *{{$tableNameSingular}}) SaveG(whitelist ...string) error {
-  if o.IsNew() {
-    return o.InsertG(whitelist...)
-  } else {
-    return o.UpdateG(whitelist...)
-  }
-}
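
With the MSSQL branch gone, every dialect shares the same exists query. A sketch of what the generated Exists boils down to for a hypothetical Postgres users table (names illustrative):

package app

import (
	"database/sql"

	"github.com/pkg/errors"
)

// userExists mirrors the generated Exists body for a hypothetical users
// table: scan a single boolean out of select exists(...).
func userExists(db *sql.DB, id int64) (bool, error) {
	var exists bool
	query := `select exists(select 1 from "users" where "id"=$1 limit 1)`
	if err := db.QueryRow(query, id).Scan(&exists); err != nil {
		return false, errors.Wrap(err, "models: unable to check if users exists")
	}
	return exists, nil
}
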
diff --git a/templates/22_query.tpl b/templates/22_query.tpl
deleted file mode 100644
index 2dcf973..0000000
--- a/templates/22_query.tpl
+++ /dev/null
@@ -1,33 +0,0 @@
-{{- $tableNameSingular := .Table.Name | singular | titleCase -}}
-
-// {{$tableNameSingular}}NewQuery filters query results
-func {{$tableNameSingular}}NewQuery(exec boil.Executor) *{{$tableNameSingular}}Query {
-	return &{{$tableNameSingular}}Query{NewQuery(exec, qm.Select("*"), qm.From("{{.Table.Name | .SchemaTable}}"))}
-}
-
-// {{$tableNameSingular}}NewQuery filters query results
-func {{$tableNameSingular}}NewQueryG() *{{$tableNameSingular}}Query {
-	return {{$tableNameSingular}}NewQuery(boil.GetDB())
-}
-
-// Where filters query results
-func (q *{{$tableNameSingular}}Query) Where(filters {{$tableNameSingular}}Filter) *{{$tableNameSingular}}Query {
-	r := reflect.ValueOf(filters)
-	for i := 0; i < r.NumField(); i++ {
-		f := r.Field(i)
-		if f.Elem().IsValid() {
-			if nullable, ok := f.Elem().Interface().(null.Nullable); ok && nullable.IsNull() {
-				queries.AppendWhere(q.Query, r.Type().Field(i).Tag.Get("boil")+" IS NULL")
-			} else {
-				queries.AppendWhere(q.Query, r.Type().Field(i).Tag.Get("boil")+" = ?", f.Elem().Interface())
-			}
-		}
-	}
-	return q
-}
-
-// Limit limits query results
-func (q *{{$tableNameSingular}}Query) Limit(limit int) *{{$tableNameSingular}}Query {
-	queries.SetLimit(q.Query, limit)
-	return q
-}
\ No newline at end of file
diff --git a/templates/23_merge.tpl b/templates/23_merge.tpl
deleted file mode 100644
index 16a7b58..0000000
--- a/templates/23_merge.tpl
+++ /dev/null
@@ -1,107 +0,0 @@
-{{- $tableNamePlural := .Table.Name | plural | titleCase -}}
-{{- $tableNameSingular := .Table.Name | singular | titleCase -}}
-{{- if .Table.IsJoinTable -}}
-{{- else -}}
-	{{- $dot := . }}
-// Merge combines two {{$tableNamePlural}} into one. The primary record will be kept, and the secondary will be deleted.
-func Merge{{$tableNamePlural}}(exec boil.Executor, primaryID uint64, secondaryID uint64) (err error) {
-	tx, ok := exec.(boil.Transactor)
-	if !ok {
-		txdb, ok := exec.(boil.Beginner)
-		if !ok {
-			return errors.Err("database does not support transactions")
-		}
-
-		tx, err = txdb.Begin()
-		if err != nil {
-			return errors.Err(err)
-		}
-
-		defer func() {
-			if p := recover(); p != nil {
-				tx.Rollback()
-				panic(p) // Rollback, then propagate panic
-			} else if err != nil {
-				tx.Rollback()
-			} else {
-				err = tx.Commit()
-			}
-		}()
-	}
-
-  primary, err := Find{{$tableNameSingular}}(tx, primaryID)
-  if err != nil {
-    return errors.Err(err)
-  } else if primary == nil {
-		return errors.Err("primary {{$tableNameSingular}} not found")
-	}
-
-  secondary, err := Find{{$tableNameSingular}}(tx, secondaryID)
-  if err != nil {
-    return errors.Err(err)
-  } else if secondary == nil {
-		return errors.Err("secondary {{$tableNameSingular}} not found")
-	}
-
-  foreignKeys := []foreignKey{
-	{{- range .Tables -}}
-	  {{- range .FKeys -}}
-	    {{- if eq $dot.Table.Name .ForeignTable }}
-		  {foreignTable: "{{.Table}}", foreignColumn: "{{.Column}}"},
-      {{- end -}}
-    {{- end -}}
-  {{- end }}
-  }
-
-  conflictingKeys := []conflictingUniqueKey{
-    {{- range .Tables -}}
-      {{- $table := . -}}
-      {{- range .FKeys -}}
-        {{- $fk := . -}}
-        {{- if eq $dot.Table.Name .ForeignTable -}}
-          {{- range $table.UKeys -}}
-            {{- if setInclude $fk.Column .Columns }}
-              {table: "{{$fk.Table}}", objectIdColumn: "{{$fk.Column}}", columns: []string{`{{ .Columns | join "`,`" }}`}},
-            {{- end -}}
-          {{- end -}}
-        {{- end -}}
-      {{- end -}}
-    {{- end }}
-  }
-
-  err = mergeModels(tx, primaryID, secondaryID, foreignKeys, conflictingKeys)
-  if err != nil {
-    return err
-  }
-
-	pr := reflect.ValueOf(primary)
-	sr := reflect.ValueOf(secondary)
-	// for any column that's null on the primary and not null on the secondary, copy from secondary to primary
-	for i := 0; i < sr.Elem().NumField(); i++ {
-		pf := pr.Elem().Field(i)
-		sf := sr.Elem().Field(i)
-		if sf.IsValid() {
-			if nullable, ok := sf.Interface().(null.Nullable); ok && !nullable.IsNull() && pf.Interface().(null.Nullable).IsNull() {
-				pf.Set(sf)
-			}
-		}
-	}
-
-	err = primary.Update(tx)
-	if err != nil {
-		return err
-	}
-
-	err = secondary.Delete(tx)
-	if err != nil {
-		return err
-	}
-
-  return nil
-}
-
-// Merge{{$tableNamePlural}}G combines two {{$tableNamePlural}} into one using the global executor. The primary record is kept, and the secondary is deleted.
-func Merge{{$tableNamePlural}}G(primaryID uint64, secondaryID uint64) error {
-  return Merge{{$tableNamePlural}}(boil.GetDB(), primaryID, secondaryID)
-}
-{{- end -}}{{/* join table */}}
\ No newline at end of file
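Reviewer note: the null-backfill loop in the deleted Merge template copies every column that is unset on the primary record but set on the secondary. The sketch below restates that reflection pattern with a hypothetical record type using plain pointer fields in place of the null package types; it makes no assumptions about the generated models.

```go
package main

import (
	"fmt"
	"reflect"
)

// record stands in for a generated model; nullable columns are modelled here
// with plain pointers rather than the null package used by the templates.
type record struct {
	ID    int
	Email *string
	Phone *string
}

// copyMissingFields copies any field that is nil on the primary but set on
// the secondary, mirroring the loop in the removed Merge template.
func copyMissingFields(primary, secondary *record) {
	pr := reflect.ValueOf(primary).Elem()
	sr := reflect.ValueOf(secondary).Elem()
	for i := 0; i < sr.NumField(); i++ {
		pf := pr.Field(i)
		sf := sr.Field(i)
		if pf.Kind() == reflect.Ptr && pf.IsNil() && !sf.IsNil() {
			pf.Set(sf)
		}
	}
}

func main() {
	email := "a@example.com"
	primary := &record{ID: 1}
	secondary := &record{ID: 2, Email: &email}
	copyMissingFields(primary, secondary)
	fmt.Println(*primary.Email) // a@example.com
}
```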
diff --git a/templates/singleton/boil_queries.tpl b/templates/singleton/boil_queries.tpl
index 3e9ebb8..d0879cb 100644
--- a/templates/singleton/boil_queries.tpl
+++ b/templates/singleton/boil_queries.tpl
@@ -2,7 +2,6 @@ var dialect = queries.Dialect{
 	LQ: 0x{{printf "%x" .Dialect.LQ}},
 	RQ: 0x{{printf "%x" .Dialect.RQ}},
 	IndexPlaceholders: {{.Dialect.IndexPlaceholders}},
-	UseTopClause: {{.Dialect.UseTopClause}},
 }
 
 // NewQueryG initializes a new Query using the passed in QueryMods
@@ -19,168 +18,3 @@ func NewQuery(exec boil.Executor, mods ...qm.QueryMod) *queries.Query {
 
 	return q
 }
-
-func mergeModels(tx boil.Executor, primaryID uint64, secondaryID uint64, foreignKeys []foreignKey, conflictingKeys []conflictingUniqueKey) error {
-	if len(foreignKeys) < 1 {
-		return nil
-	}
-	var err error
-
-	for _, conflict := range conflictingKeys {
-		if len(conflict.columns) == 1 && conflict.columns[0] == conflict.objectIdColumn {
-			err = deleteOneToOneConflictsBeforeMerge(tx, conflict, primaryID, secondaryID)
-		} else {
-			err = deleteOneToManyConflictsBeforeMerge(tx, conflict, primaryID, secondaryID)
-		}
-		if err != nil {
-			return err
-		}
-	}
-
-	for _, fk := range foreignKeys {
-		// TODO: use NewQuery here, not plain sql
-		query := fmt.Sprintf(
-			"UPDATE %s SET %s = %s WHERE %s = %s",
-			fk.foreignTable, fk.foreignColumn, strmangle.Placeholders(dialect.IndexPlaceholders, 1, 1, 1),
-			fk.foreignColumn, strmangle.Placeholders(dialect.IndexPlaceholders, 1, 2, 1),
-		)
-		_, err = tx.Exec(query, primaryID, secondaryID)
-		if err != nil {
-			return errors.Err(err)
-		}
-	}
-	return checkMerge(tx, foreignKeys)
-}
-
-func deleteOneToOneConflictsBeforeMerge(tx boil.Executor, conflict conflictingUniqueKey, primaryID uint64, secondaryID uint64) error {
-	query := fmt.Sprintf(
-		"SELECT COUNT(*) FROM %s WHERE %s IN (%s)",
-		conflict.table, conflict.objectIdColumn,
-		strmangle.Placeholders(dialect.IndexPlaceholders, 2, 1, 1),
-	)
-
-	var count int
-	err := tx.QueryRow(query, primaryID, secondaryID).Scan(&count)
-	if err != nil {
-		return errors.Err(err)
-	}
-
-	if count > 2 {
-		return errors.Err("it should not be possible to have more than two rows here")
-	} else if count != 2 {
-		return nil // no conflicting rows
-	}
-
-	query = fmt.Sprintf(
-		"DELETE FROM %s WHERE %s = %s",
-		conflict.table, conflict.objectIdColumn, strmangle.Placeholders(dialect.IndexPlaceholders, 1, 1, 1),
-	)
-
-	_, err = tx.Exec(query, secondaryID)
-	return errors.Err(err)
-}
-
-func deleteOneToManyConflictsBeforeMerge(tx boil.Executor, conflict conflictingUniqueKey, primaryID uint64, secondaryID uint64) error {
-	conflictingColumns := strmangle.SetComplement(conflict.columns, []string{conflict.objectIdColumn})
-
-	query := fmt.Sprintf(
-		"SELECT %s FROM %s WHERE %s IN (%s) GROUP BY %s HAVING count(distinct %s) > 1",
-		strings.Join(conflictingColumns, ","), conflict.table, conflict.objectIdColumn,
-		strmangle.Placeholders(dialect.IndexPlaceholders, 2, 1, 1),
-		strings.Join(conflictingColumns, ","), conflict.objectIdColumn,
-	)
-
-	// The query arguments are the primary and secondary object IDs involved in the conflict.
-	rows, err := tx.Query(query, primaryID, secondaryID)
-	if err != nil {
-		return errors.Err(err)
-	}
-
-	// Since we don't know in advance how many columns the query returns, we have to assign them dynamically
-	// so they can be used in the delete query.
-	colNames, err := rows.Columns()
-	if err != nil {
-		return errors.Err(err)
-	}
-	// Every row returned by the query is a conflicting row that must be removed. Store each row's keys in a slice.
-	var rowsToRemove = [][]interface{}(nil)
-	for rows.Next() {
-		//Set pointers for dynamic scan
-		iColPtrs := make([]interface{}, len(colNames))
-		for i := 0; i < len(colNames); i++ {
-			s := string("")
-			iColPtrs[i] = &s
-		}
-		//Dynamically scan n columns
-		err = rows.Scan(iColPtrs...)
-		if err != nil {
-			return errors.Err(err)
-		}
-		//Grab scanned values for query arguments
-		iCol := make([]interface{}, len(colNames))
-		for i, col := range iColPtrs {
-			x := col.(*string)
-			iCol[i] = *x
-		}
-		rowsToRemove = append(rowsToRemove, iCol)
-	}
-	defer rows.Close()
-
-	// This delete adjusts dynamically to the number of conflicting key columns, adding an AND expression for each
-	// key so that only the conflicting rows are deleted.
-	query = fmt.Sprintf(
-		"DELETE FROM %s %s",
-		conflict.table,
-		"WHERE "+strings.Join(conflict.columns, " = ? AND ")+" = ?",
-	)
-
-	// There can be multiple conflicting rows between the two object IDs. The SELECT above collected each row's
-	// key values; delete those rows one at a time here.
-	for _, rowToDelete := range rowsToRemove {
-		rowToDelete = append(rowToDelete, secondaryID)
-		_, err = tx.Exec(query, rowToDelete...)
-		if err != nil {
-			return errors.Err(err)
-		}
-	}
-	return nil
-}
-
-func checkMerge(tx boil.Executor, foreignKeys []foreignKey) error {
-	uniqueColumns := []interface{}{}
-	uniqueColumnNames := map[string]bool{}
-	handledTablesColumns := map[string]bool{}
-
-	for _, fk := range foreignKeys {
-		handledTablesColumns[fk.foreignTable+"."+fk.foreignColumn] = true
-		if _, ok := uniqueColumnNames[fk.foreignColumn]; !ok {
-			uniqueColumns = append(uniqueColumns, fk.foreignColumn)
-			uniqueColumnNames[fk.foreignColumn] = true
-		}
-	}
-
-	q := fmt.Sprintf(
-		`SELECT table_name, column_name FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA=DATABASE() AND column_name IN (%s)`,
-		strmangle.Placeholders(dialect.IndexPlaceholders, len(uniqueColumns), 1, 1),
-	)
-	rows, err := tx.Query(q, uniqueColumns...)
-	if err != nil {
-		return errors.Err(err)
-	}
-	defer rows.Close()
-
-	for rows.Next() {
-		var tableName string
-		var columnName string
-		err = rows.Scan(&tableName, &columnName)
-		if err != nil {
-			return errors.Err(err)
-		}
-
-		if _, exists := handledTablesColumns[tableName+"."+columnName]; !exists {
-			return errors.Err("missing merge for " + tableName + "." + columnName)
-		}
-	}
-
-	return nil
-}
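Reviewer note: deleteOneToManyConflictsBeforeMerge above scans rows whose column count is only known at runtime. The helper below restates that dynamic-scan technique as a standalone function over database/sql; it is a sketch of the approach, not part of the removed code, and the package and function names are made up for illustration.

```go
package example

import "database/sql"

// scanRowsDynamically reads every row of an *sql.Rows result without knowing
// the column count ahead of time, returning each row as a []interface{} of
// string values, the same approach used to collect conflicting keys above.
func scanRowsDynamically(rows *sql.Rows) ([][]interface{}, error) {
	cols, err := rows.Columns()
	if err != nil {
		return nil, err
	}

	var out [][]interface{}
	for rows.Next() {
		// One *string destination per column for Scan.
		ptrs := make([]interface{}, len(cols))
		for i := range ptrs {
			ptrs[i] = new(string)
		}
		if err := rows.Scan(ptrs...); err != nil {
			return nil, err
		}
		// Dereference into plain values usable as query arguments.
		vals := make([]interface{}, len(cols))
		for i, p := range ptrs {
			vals[i] = *p.(*string)
		}
		out = append(out, vals)
	}
	return out, rows.Err()
}
```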
diff --git a/templates/singleton/boil_table_names.tpl b/templates/singleton/boil_table_names.tpl
deleted file mode 100644
index 062bcad..0000000
--- a/templates/singleton/boil_table_names.tpl
+++ /dev/null
@@ -1,9 +0,0 @@
-var TableNames = struct {
-	{{range $table := .Tables -}}
-	{{titleCase $table.Name}} string
-	{{end -}}
-}{
-	{{range $table := .Tables -}}
-	{{titleCase $table.Name}}: "{{$table.Name}}",
-	{{end -}}
-}
diff --git a/templates/singleton/boil_types.tpl b/templates/singleton/boil_types.tpl
index 48a85e2..9bf13e8 100644
--- a/templates/singleton/boil_types.tpl
+++ b/templates/singleton/boil_types.tpl
@@ -1,26 +1,10 @@
 // M type is for providing columns and column values to UpdateAll.
 type M map[string]interface{}
 
-// foreignKey connects two tables. When merging records, foreign keys from secondary record must
-// be reassigned to primary record.
-type foreignKey struct {
-	foreignTable  string
-	foreignColumn string
-}
-
-// conflictingUniqueKey records a merge conflict. If two rows exist with the same value in the
-// conflicting column for two records being merged, one row must be deleted.
-type conflictingUniqueKey struct {
-	table          string
-	objectIdColumn string
-	columns        []string
-}
-
-
 // ErrSyncFail occurs during insert when the record could not be retrieved in
 // order to populate default value information. This usually happens when LastInsertId
 // fails or there was a primary key configuration that was not resolvable.
-var ErrSyncFail = errors.Base("{{.PkgName}}: failed to synchronize data after insert")
+var ErrSyncFail = errors.New("{{.PkgName}}: failed to synchronize data after insert")
 
 type insertCache struct {
 	query        string
diff --git a/boilingcore/templates_test.go b/templates_test.go
similarity index 98%
rename from boilingcore/templates_test.go
rename to templates_test.go
index 25e4777..df45e1e 100644
--- a/boilingcore/templates_test.go
+++ b/templates_test.go
@@ -1,4 +1,4 @@
-package boilingcore
+package main
 
 import (
 	"sort"
diff --git a/templates_test/delete.tpl b/templates_test/delete.tpl
index d548efa..f745ea4 100644
--- a/templates_test/delete.tpl
+++ b/templates_test/delete.tpl
@@ -8,7 +8,7 @@ func test{{$tableNamePlural}}Delete(t *testing.T) {
 	seed := randomize.NewSeed()
 	var err error
 	{{$varNameSingular}} := &{{$tableNameSingular}}{}
-	if err = randomize.Struct(seed, {{$varNameSingular}}, {{$varNameSingular}}DBTypes, true, {{$varNameSingular}}ColumnsWithDefault...); err != nil {
+	if err = randomize.Struct(seed, {{$varNameSingular}}, {{$varNameSingular}}DBTypes, true); err != nil {
 		t.Errorf("Unable to randomize {{$tableNameSingular}} struct: %s", err)
 	}
 
@@ -38,7 +38,7 @@ func test{{$tableNamePlural}}QueryDeleteAll(t *testing.T) {
 	seed := randomize.NewSeed()
 	var err error
 	{{$varNameSingular}} := &{{$tableNameSingular}}{}
-	if err = randomize.Struct(seed, {{$varNameSingular}}, {{$varNameSingular}}DBTypes, true, {{$varNameSingular}}ColumnsWithDefault...); err != nil {
+	if err = randomize.Struct(seed, {{$varNameSingular}}, {{$varNameSingular}}DBTypes, true); err != nil {
 		t.Errorf("Unable to randomize {{$tableNameSingular}} struct: %s", err)
 	}
 
@@ -68,7 +68,7 @@ func test{{$tableNamePlural}}SliceDeleteAll(t *testing.T) {
 	seed := randomize.NewSeed()
 	var err error
 	{{$varNameSingular}} := &{{$tableNameSingular}}{}
-	if err = randomize.Struct(seed, {{$varNameSingular}}, {{$varNameSingular}}DBTypes, true, {{$varNameSingular}}ColumnsWithDefault...); err != nil {
+	if err = randomize.Struct(seed, {{$varNameSingular}}, {{$varNameSingular}}DBTypes, true); err != nil {
 		t.Errorf("Unable to randomize {{$tableNameSingular}} struct: %s", err)
 	}
 
diff --git a/templates_test/finishers.tpl b/templates_test/finishers.tpl
index fa8b129..b3ba672 100644
--- a/templates_test/finishers.tpl
+++ b/templates_test/finishers.tpl
@@ -95,6 +95,15 @@ func test{{$tableNamePlural}}Count(t *testing.T) {
 
 	tx := MustTx(boil.Begin())
 	defer tx.Rollback()
+	count, err := {{$tableNamePlural}}(tx).Count()
+	if err != nil {
+		t.Error(err)
+	}
+
+	if count != 0 {
+		t.Error("want 0 records found")
+	}
+
 	if err = {{$varNameSingular}}One.Insert(tx); err != nil {
 		t.Error(err)
 	}
@@ -102,7 +111,7 @@ func test{{$tableNamePlural}}Count(t *testing.T) {
 		t.Error(err)
 	}
 
-	count, err := {{$tableNamePlural}}(tx).Count()
+	count, err = {{$tableNamePlural}}(tx).Count()
 	if err != nil {
 		t.Error(err)
 	}
@@ -111,3 +120,38 @@ func test{{$tableNamePlural}}Count(t *testing.T) {
 		t.Error("want 2 records, got:", count)
 	}
 }
+
+func test{{$tableNamePlural}}ExistsFinisher(t *testing.T) {
+	t.Parallel()
+
+	var err error
+	seed := randomize.NewSeed()
+	{{$varNameSingular}}One := &{{$tableNameSingular}}{}
+	if err = randomize.Struct(seed, {{$varNameSingular}}One, {{$varNameSingular}}DBTypes, false, {{$varNameSingular}}ColumnsWithDefault...); err != nil {
+		t.Errorf("Unable to randomize {{$tableNameSingular}} struct: %s", err)
+	}
+
+	tx := MustTx(boil.Begin())
+	defer tx.Rollback()
+	exists, err := {{$tableNamePlural}}(tx).Exists()
+	if err != nil {
+		t.Error(err)
+	}
+
+	if exists {
+		t.Error("the record should not exist")
+	}
+
+	if err = {{$varNameSingular}}One.Insert(tx); err != nil {
+		t.Error(err)
+	}
+
+	exists, err = {{$tableNamePlural}}(tx).Exists()
+	if err != nil {
+		t.Error(err)
+	}
+
+	if !exists {
+		t.Error("wanted record to exist")
+	}
+}
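Reviewer note: the new test exercises an Exists() finisher on the query. For reviewers unfamiliar with it, the sketch below shows one common way such a check is implemented over database/sql with a SELECT EXISTS wrapper (Postgres/MySQL style); it is an illustration under those assumptions, not sqlboiler's generated implementation, and the helper name is hypothetical.

```go
package example

import (
	"database/sql"
	"fmt"
)

// exists reports whether any row matches the given WHERE clause. The table
// and where strings are assumed to be trusted, generated SQL fragments, so
// only the arguments are passed as placeholders.
func exists(db *sql.DB, table, where string, args ...interface{}) (bool, error) {
	query := fmt.Sprintf("SELECT EXISTS (SELECT 1 FROM %s WHERE %s)", table, where)
	var found bool
	if err := db.QueryRow(query, args...).Scan(&found); err != nil {
		return false, err
	}
	return found, nil
}
```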
diff --git a/templates_test/insert.tpl b/templates_test/insert.tpl
index e3446f0..d14a0c8 100644
--- a/templates_test/insert.tpl
+++ b/templates_test/insert.tpl
@@ -41,7 +41,7 @@ func test{{$tableNamePlural}}InsertWhitelist(t *testing.T) {
 
 	tx := MustTx(boil.Begin())
 	defer tx.Rollback()
-	if err = {{$varNameSingular}}.Insert(tx, {{$varNameSingular}}ColumnsWithoutDefault...); err != nil {
+	if err = {{$varNameSingular}}.Insert(tx, {{$varNameSingular}}Columns...); err != nil {
 		t.Error(err)
 	}
 
diff --git a/templates_test/main_test/mssql_main.tpl b/templates_test/main_test/mssql_main.tpl
deleted file mode 100644
index 5b0b5e1..0000000
--- a/templates_test/main_test/mssql_main.tpl
+++ /dev/null
@@ -1,131 +0,0 @@
-type mssqlTester struct {
-	dbConn     *sql.DB
-	dbName     string
-	host       string
-	user       string
-	pass       string
-	sslmode    string
-	port       int
-	testDBName string
-}
-
-func init() {
-	dbMain = &mssqlTester{}
-}
-
-func (m *mssqlTester) setup() error {
-	var err error
-	m.dbName = viper.GetString("mssql.dbname")
-	m.host = viper.GetString("mssql.host")
-	m.user = viper.GetString("mssql.user")
-	m.pass = viper.GetString("mssql.pass")
-	m.port = viper.GetInt("mssql.port")
-	m.sslmode = viper.GetString("mssql.sslmode")
-	// Create a randomized db name.
-	m.testDBName = randomize.StableDBName(m.dbName)
-
-	if err = m.dropTestDB(); err != nil {
-		return errors.Err(err)
-	}
-	if err = m.createTestDB(); err != nil {
-		return errors.Err(err)
-	}
-
-	createCmd := exec.Command("sqlcmd", "-S", m.host, "-U", m.user, "-P", m.pass, "-d", m.testDBName)
-
-	f, err := os.Open("tables_schema.sql")
-	if err != nil {
-		return errors.Prefix("failed to open tables_schema.sql file", err)
-	}
-
-	defer f.Close()
-
-	createCmd.Stdin = newFKeyDestroyer(rgxMSSQLkey, f)
-
-	if err = createCmd.Start(); err != nil {
-		return errors.Prefix("failed to start sqlcmd command", err)
-	}
-
-	if err = createCmd.Wait(); err != nil {
-		fmt.Println(err)
-		return errors.Prefix("failed to wait for sqlcmd command", err)
-	}
-
-	return nil
-}
-
-func (m *mssqlTester) sslMode(mode string) string {
-	switch mode {
-	case "true":
-		return "true"
-	case "false":
-		return "false"
-	default:
-		return "disable"
-	}
-}
-
-func (m *mssqlTester) createTestDB() error {
-	sql := fmt.Sprintf(`
-	CREATE DATABASE %s;
-	GO
-	ALTER DATABASE %[1]s
-	SET READ_COMMITTED_SNAPSHOT ON;
-	GO`, m.testDBName)
-	return m.runCmd(sql, "sqlcmd", "-S", m.host, "-U", m.user, "-P", m.pass)
-}
-
-func (m *mssqlTester) dropTestDB() error {
-	// Since MS SQL 2016 it can be done with
-	// DROP DATABASE [ IF EXISTS ] { database_name | database_snapshot_name } [ ,...n ] [;]
-	sql := fmt.Sprintf(`
-	IF EXISTS(SELECT name FROM sys.databases 
-		WHERE name = '%s')
-		DROP DATABASE %s
-	GO`, m.testDBName, m.testDBName)
-	return m.runCmd(sql, "sqlcmd", "-S", m.host, "-U", m.user, "-P", m.pass)
-}
-
-func (m *mssqlTester) teardown() error {
-	if m.dbConn != nil {
-		m.dbConn.Close()
-	}
-
-	if err := m.dropTestDB(); err != nil {
-		return errors.Err(err)
-	}
-
-	return nil
-}
-
-func (m *mssqlTester) runCmd(stdin, command string, args ...string) error {
-	cmd := exec.Command(command, args...)
-	cmd.Stdin = strings.NewReader(stdin)
-
-	stdout := &bytes.Buffer{}
-	stderr := &bytes.Buffer{}
-	cmd.Stdout = stdout
-	cmd.Stderr = stderr
-	if err := cmd.Run(); err != nil {
-		fmt.Println("failed running:", command, args)
-		fmt.Println(stdout.String())
-		fmt.Println(stderr.String())
-		return errors.Err(err)
-	}
-
-	return nil
-}
-
-func (m *mssqlTester) conn() (*sql.DB, error) {
-	if m.dbConn != nil {
-		return m.dbConn, nil
-	}
-
-	var err error
-	m.dbConn, err = sql.Open("mssql", drivers.MSSQLBuildQueryString(m.user, m.pass, m.testDBName, m.host, m.port, m.sslmode))
-	if err != nil {
-		return nil, err
-	}
-
-	return m.dbConn, nil
-}
diff --git a/templates_test/main_test/mysql_main.tpl b/templates_test/main_test/mysql_main.tpl
index ef735e2..fc43d3d 100644
--- a/templates_test/main_test/mysql_main.tpl
+++ b/templates_test/main_test/mysql_main.tpl
@@ -30,14 +30,14 @@ func (m *mysqlTester) setup() error {
 	m.testDBName = randomize.StableDBName(m.dbName)
 
 	if err = m.makeOptionFile(); err != nil {
-		return errors.Prefix("couldn't make option file", err)
+		return errors.Wrap(err, "couldn't make option file")
 	}
 
 	if err = m.dropTestDB(); err != nil {
-		return errors.Err(err)
+		return err
 	}
 	if err = m.createTestDB(); err != nil {
-		return errors.Err(err)
+		return err
 	}
 
 	dumpCmd := exec.Command("mysqldump", m.defaultsFile(), "--no-data", m.dbName)
@@ -48,22 +48,22 @@ func (m *mysqlTester) setup() error {
 	createCmd.Stdin = newFKeyDestroyer(rgxMySQLkey, r)
 
 	if err = dumpCmd.Start(); err != nil {
-		return errors.Prefix("failed to start mysqldump command", err)
+		return errors.Wrap(err, "failed to start mysqldump command")
 	}
 	if err = createCmd.Start(); err != nil {
-		return errors.Prefix("failed to start mysql command", err)
+		return errors.Wrap(err, "failed to start mysql command")
 	}
 
 	if err = dumpCmd.Wait(); err != nil {
 		fmt.Println(err)
-		return errors.Prefix("failed to wait for mysqldump command", err)
+		return errors.Wrap(err, "failed to wait for mysqldump command")
 	}
 
 	w.Close() // After dumpCmd is done, close the write end of the pipe
 
 	if err = createCmd.Wait(); err != nil {
 		fmt.Println(err)
-		return errors.Prefix("failed to wait for mysql command", err)
+		return errors.Wrap(err, "failed to wait for mysql command")
 	}
 
 	return nil
@@ -87,15 +87,7 @@ func (m *mysqlTester) defaultsFile() string {
 func (m *mysqlTester) makeOptionFile() error {
 	tmp, err := ioutil.TempFile("", "optionfile")
 	if err != nil {
-		return errors.Prefix("failed to create option file", err)
-	}
-
-	isTCP := false
-	_, err = os.Stat(m.host)
-	if os.IsNotExist(err) {
-		isTCP = true
-	} else if err != nil {
-		return errors.Prefix("could not stat m.host", err)
+		return errors.Wrap(err, "failed to create option file")
 	}
 
 	fmt.Fprintln(tmp, "[client]")
@@ -104,9 +96,6 @@ func (m *mysqlTester) makeOptionFile() error {
 	fmt.Fprintf(tmp, "user=%s\n", m.user)
 	fmt.Fprintf(tmp, "password=%s\n", m.pass)
 	fmt.Fprintf(tmp, "ssl-mode=%s\n", m.sslMode(m.sslmode))
-	if isTCP {
-		fmt.Fprintln(tmp, "protocol=tcp")
-	}
 
 	fmt.Fprintln(tmp, "[mysqldump]")
 	fmt.Fprintf(tmp, "host=%s\n", m.host)
@@ -114,9 +103,6 @@ func (m *mysqlTester) makeOptionFile() error {
 	fmt.Fprintf(tmp, "user=%s\n", m.user)
 	fmt.Fprintf(tmp, "password=%s\n", m.pass)
 	fmt.Fprintf(tmp, "ssl-mode=%s\n", m.sslMode(m.sslmode))
-	if isTCP {
-		fmt.Fprintln(tmp, "protocol=tcp")
-	}
 
 	m.optionFile = tmp.Name()
 
@@ -139,7 +125,7 @@ func (m *mysqlTester) teardown() error {
 	}
 
 	if err := m.dropTestDB(); err != nil {
-		return errors.Err(err)
+		return err
 	}
 
 	return os.Remove(m.optionFile)
@@ -159,7 +145,7 @@ func (m *mysqlTester) runCmd(stdin, command string, args ...string) error {
 	fmt.Println("failed running:", command, args)
 	fmt.Println(stdout.String())
 	fmt.Println(stderr.String())
-	return errors.Err(err)
+	return err
 	}
 
 	return nil
diff --git a/templates_test/main_test/postgres_main.tpl b/templates_test/main_test/postgres_main.tpl
index 3110325..0abcba3 100644
--- a/templates_test/main_test/postgres_main.tpl
+++ b/templates_test/main_test/postgres_main.tpl
@@ -33,14 +33,14 @@ func (p *pgTester) setup() error {
   p.testDBName = randomize.StableDBName(p.dbName)
 
   if err = p.makePGPassFile(); err != nil {
-    return errors.Err(err)
+    return err
   }
 
   if err = p.dropTestDB(); err != nil {
-    return errors.Err(err)
+    return err
   }
   if err = p.createTestDB(); err != nil {
-    return errors.Err(err)
+    return err
   }
 
   dumpCmd := exec.Command("pg_dump", "--schema-only", p.dbName)
@@ -53,22 +53,22 @@ func (p *pgTester) setup() error {
   createCmd.Stdin = newFKeyDestroyer(rgxPGFkey, r)
 
   if err = dumpCmd.Start(); err != nil {
-    return errors.Prefix("failed to start pg_dump command", err)
+    return errors.Wrap(err, "failed to start pg_dump command")
   }
   if err = createCmd.Start(); err != nil {
-    return errors.Prefix("failed to start psql command", err)
+    return errors.Wrap(err, "failed to start psql command")
   }
 
   if err = dumpCmd.Wait(); err != nil {
     fmt.Println(err)
-    return errors.Prefix("failed to wait for pg_dump command", err)
+    return errors.Wrap(err, "failed to wait for pg_dump command")
   }
 
   w.Close() // After dumpCmd is done, close the write end of the pipe
 
   if err = createCmd.Wait(); err != nil {
     fmt.Println(err)
-    return errors.Prefix("failed to wait for psql command", err)
+    return errors.Wrap(err, "failed to wait for psql command")
   }
 
   return nil
@@ -90,7 +90,7 @@ func (p *pgTester) runCmd(stdin, command string, args ...string) error {
     fmt.Println("failed running:", command, args)
     fmt.Println(stdout.String())
     fmt.Println(stderr.String())
-    return errors.Err(err)
+    return err
   }
 
   return nil
@@ -108,7 +108,7 @@ func (p *pgTester) pgEnv() []string {
 func (p *pgTester) makePGPassFile() error {
   tmp, err := ioutil.TempFile("", "pgpass")
   if err != nil {
-    return errors.Prefix("failed to create option file", err)
+    return errors.Wrap(err, "failed to create option file")
   }
 
   fmt.Fprintf(tmp, "%s:%d:postgres:%s", p.host, p.port, p.user)
@@ -145,12 +145,12 @@ func (p *pgTester) dropTestDB() error {
 func (p *pgTester) teardown() error {
   var err error
   if err = p.dbConn.Close(); err != nil {
-    return errors.Err(err)
+    return err
   }
   p.dbConn = nil
 
   if err = p.dropTestDB(); err != nil {
-    return errors.Err(err)
+    return err
   }
 
   return os.Remove(p.pgPassFile)
diff --git a/templates_test/relationship_one_to_one.tpl b/templates_test/relationship_one_to_one.tpl
index 27c13ef..4e253e5 100644
--- a/templates_test/relationship_one_to_one.tpl
+++ b/templates_test/relationship_one_to_one.tpl
@@ -50,7 +50,7 @@ func test{{$txt.LocalTable.NameGo}}OneToOne{{$txt.ForeignTable.NameGo}}Using{{$t
 	}
 
 	slice := {{$txt.LocalTable.NameGo}}Slice{&local}
-	if err = local.L.Load{{$txt.Function.Name}}(tx, false, (*[]*{{$txt.LocalTable.NameGo}})(&slice)); err != nil {
+	if err = local.L.Load{{$txt.Function.Name}}(tx, false, &slice); err != nil {
 		t.Fatal(err)
 	}
 	if local.R.{{$txt.Function.Name}} == nil {
diff --git a/templates_test/relationship_to_many.tpl b/templates_test/relationship_to_many.tpl
index 0402609..d23984e 100644
--- a/templates_test/relationship_to_many.tpl
+++ b/templates_test/relationship_to_many.tpl
@@ -28,7 +28,7 @@ func test{{$txt.LocalTable.NameGo}}ToMany{{$txt.Function.Name}}(t *testing.T) {
 	{{if .Nullable -}}
 	a.{{.Column | titleCase}}.Valid = true
 	{{- end}}
-	{{- if .ForeignColumnNullable}}
+	{{- if .ForeignColumnNullable -}}
 	b.{{.ForeignColumn | titleCase}}.Valid = true
 	c.{{.ForeignColumn | titleCase}}.Valid = true
 	{{- end}}
@@ -87,7 +87,7 @@ func test{{$txt.LocalTable.NameGo}}ToMany{{$txt.Function.Name}}(t *testing.T) {
 	}
 
 	slice := {{$txt.LocalTable.NameGo}}Slice{&a}
-	if err = a.L.Load{{$txt.Function.Name}}(tx, false, (*[]*{{$txt.LocalTable.NameGo}})(&slice)); err != nil {
+	if err = a.L.Load{{$txt.Function.Name}}(tx, false, &slice); err != nil {
 		t.Fatal(err)
 	}
 	if got := len(a.R.{{$txt.Function.Name}}); got != 2 {
diff --git a/templates_test/relationship_to_many_setops.tpl b/templates_test/relationship_to_many_setops.tpl
index f66ba8a..01471f4 100644
--- a/templates_test/relationship_to_many_setops.tpl
+++ b/templates_test/relationship_to_many_setops.tpl
@@ -159,16 +159,12 @@ func test{{$txt.LocalTable.NameGo}}ToManySetOp{{$txt.Function.Name}}(t *testing.
 
 	{{- if .ToJoinTable}}
 
-	// The following checks cannot be implemented since we have no handle
-	// to these when we call Set(). Leaving them here as wishful thinking
-	// and to let people know there's dragons.
-	//
-	// if len(b.R.{{$txt.Function.ForeignName}}) != 0 {
-	// 	t.Error("relationship was not removed properly from the slice")
-	// }
-	// if len(c.R.{{$txt.Function.ForeignName}}) != 0 {
-	// 	t.Error("relationship was not removed properly from the slice")
-	// }
+	if len(b.R.{{$txt.Function.ForeignName}}) != 0 {
+		t.Error("relationship was not removed properly from the slice")
+	}
+	if len(c.R.{{$txt.Function.ForeignName}}) != 0 {
+		t.Error("relationship was not removed properly from the slice")
+	}
 	if d.R.{{$txt.Function.ForeignName}}[0] != &a {
 		t.Error("relationship was not added properly to the slice")
 	}
diff --git a/templates_test/relationship_to_one.tpl b/templates_test/relationship_to_one.tpl
index 9211284..3c66bf7 100644
--- a/templates_test/relationship_to_one.tpl
+++ b/templates_test/relationship_to_one.tpl
@@ -13,10 +13,10 @@ func test{{$txt.LocalTable.NameGo}}ToOne{{$txt.ForeignTable.NameGo}}Using{{$txt.
 	var foreign {{$txt.ForeignTable.NameGo}}
 
 	seed := randomize.NewSeed()
-	if err := randomize.Struct(seed, &local, {{$varNameSingular}}DBTypes, {{if .Nullable}}true{{else}}false{{end}}, {{$varNameSingular}}ColumnsWithDefault...); err != nil {
+	if err := randomize.Struct(seed, &local, {{$varNameSingular}}DBTypes, true, {{$varNameSingular}}ColumnsWithDefault...); err != nil {
 		t.Errorf("Unable to randomize {{$txt.LocalTable.NameGo}} struct: %s", err)
 	}
-	if err := randomize.Struct(seed, &foreign, {{$foreignVarNameSingular}}DBTypes, {{if .ForeignColumnNullable}}true{{else}}false{{end}}, {{$foreignVarNameSingular}}ColumnsWithDefault...); err != nil {
+	if err := randomize.Struct(seed, &foreign, {{$foreignVarNameSingular}}DBTypes, true, {{$foreignVarNameSingular}}ColumnsWithDefault...); err != nil {
 		t.Errorf("Unable to randomize {{$txt.ForeignTable.NameGo}} struct: %s", err)
 	}
 
@@ -50,7 +50,7 @@ func test{{$txt.LocalTable.NameGo}}ToOne{{$txt.ForeignTable.NameGo}}Using{{$txt.
 	}
 
 	slice := {{$txt.LocalTable.NameGo}}Slice{&local}
-	if err = local.L.Load{{$txt.Function.Name}}(tx, false, (*[]*{{$txt.LocalTable.NameGo}})(&slice)); err != nil {
+	if err = local.L.Load{{$txt.Function.Name}}(tx, false, &slice); err != nil {
 		t.Fatal(err)
 	}
 	if local.R.{{$txt.Function.Name}} == nil {
diff --git a/templates_test/singleton/boil_main_test.tpl b/templates_test/singleton/boil_main_test.tpl
index 3cf466d..0014a1e 100644
--- a/templates_test/singleton/boil_main_test.tpl
+++ b/templates_test/singleton/boil_main_test.tpl
@@ -104,12 +104,6 @@ func setConfigDefaults() {
 	if viper.GetInt("mysql.port") == 0 {
 		viper.Set("mysql.port", 3306)
 	}
-	if viper.GetString("mssql.sslmode") == "" {
-		viper.Set("mssql.sslmode", "true")
-	}
-	if viper.GetInt("mssql.port") == 0 {
-		viper.Set("mssql.port", 1433)
-	}
 }
 
 func validateConfig(driverName string) error {
@@ -133,15 +127,5 @@ func validateConfig(driverName string) error {
 		).Check()
 	}
 
-	if driverName == "mssql" {
-		return vala.BeginValidation().Validate(
-			vala.StringNotEmpty(viper.GetString("mssql.user"), "mssql.user"),
-			vala.StringNotEmpty(viper.GetString("mssql.host"), "mssql.host"),
-			vala.Not(vala.Equals(viper.GetInt("mssql.port"), 0, "mssql.port")),
-			vala.StringNotEmpty(viper.GetString("mssql.dbname"), "mssql.dbname"),
-			vala.StringNotEmpty(viper.GetString("mssql.sslmode"), "mssql.sslmode"),
-		).Check()
-	}
-
-	return errors.Err("not a valid driver name")
+	return errors.New("not a valid driver name")
 }
diff --git a/templates_test/singleton/boil_queries_test.tpl b/templates_test/singleton/boil_queries_test.tpl
index 45ff788..bd41389 100644
--- a/templates_test/singleton/boil_queries_test.tpl
+++ b/templates_test/singleton/boil_queries_test.tpl
@@ -9,7 +9,6 @@ func MustTx(transactor boil.Transactor, err error) boil.Transactor {
 
 var rgxPGFkey = regexp.MustCompile(`(?m)^ALTER TABLE ONLY .*\n\s+ADD CONSTRAINT .*? FOREIGN KEY .*?;\n`)
 var rgxMySQLkey = regexp.MustCompile(`(?m)((,\n)?\s+CONSTRAINT.*?FOREIGN KEY.*?\n)+`)
-var rgxMSSQLkey = regexp.MustCompile(`(?m)^ALTER TABLE .*ADD\s+CONSTRAINT .* FOREIGN KEY.*?.*\n?REFERENCES.*`)
 
 func newFKeyDestroyer(regex *regexp.Regexp, reader io.Reader) io.Reader {
 	return &fKeyDestroyer{
diff --git a/templates_test/singleton/boil_suites_test.tpl b/templates_test/singleton/boil_suites_test.tpl
index 08a5e21..bf105a3 100644
--- a/templates_test/singleton/boil_suites_test.tpl
+++ b/templates_test/singleton/boil_suites_test.tpl
@@ -105,6 +105,16 @@ func TestCount(t *testing.T) {
   {{- end -}}
 }
 
+func TestExistsFinisher(t *testing.T) {
+  {{- range $index, $table := .Tables}}
+  {{- if $table.IsJoinTable -}}
+  {{- else -}}
+  {{- $tableName := $table.Name | plural | titleCase -}}
+  t.Run("{{$tableName}}", test{{$tableName}}ExistsFinisher)
+  {{end -}}
+  {{- end -}}
+}
+
 {{if not .NoHooks -}}
 func TestHooks(t *testing.T) {
   {{- range $index, $table := .Tables}}
@@ -251,7 +261,7 @@ func TestToManySet(t *testing.T) {
     {{- if $table.IsJoinTable -}}
     {{- else -}}
       {{- range $table.ToManyRelationships -}}
-        {{- if not (or .ForeignColumnNullable .ToJoinTable)}}
+        {{- if not .ForeignColumnNullable -}}
         {{- else -}}
           {{- $txt := txtsFromToMany $dot.Tables $table . -}}
     t.Run("{{$txt.LocalTable.NameGo}}To{{$txt.Function.Name}}", test{{$txt.LocalTable.NameGo}}ToManySetOp{{$txt.Function.Name}})
@@ -268,7 +278,7 @@ func TestToManyRemove(t *testing.T) {
     {{- if $table.IsJoinTable -}}
     {{- else -}}
       {{- range $table.ToManyRelationships -}}
-        {{- if not (or .ForeignColumnNullable .ToJoinTable)}}
+        {{- if not .ForeignColumnNullable -}}
         {{- else -}}
           {{- $txt := txtsFromToMany $dot.Tables $table . -}}
     t.Run("{{$txt.LocalTable.NameGo}}To{{$txt.Function.Name}}", test{{$txt.LocalTable.NameGo}}ToManyRemoveOp{{$txt.Function.Name}})
diff --git a/templates_test/update.tpl b/templates_test/update.tpl
index d04401a..b33854d 100644
--- a/templates_test/update.tpl
+++ b/templates_test/update.tpl
@@ -12,7 +12,7 @@ func test{{$tableNamePlural}}Update(t *testing.T) {
 	seed := randomize.NewSeed()
 	var err error
 	{{$varNameSingular}} := &{{$tableNameSingular}}{}
-	if err = randomize.Struct(seed, {{$varNameSingular}}, {{$varNameSingular}}DBTypes, true, {{$varNameSingular}}ColumnsWithDefault...); err != nil {
+	if err = randomize.Struct(seed, {{$varNameSingular}}, {{$varNameSingular}}DBTypes, true); err != nil {
 		t.Errorf("Unable to randomize {{$tableNameSingular}} struct: %s", err)
 	}
 
@@ -50,7 +50,7 @@ func test{{$tableNamePlural}}SliceUpdateAll(t *testing.T) {
 	seed := randomize.NewSeed()
 	var err error
 	{{$varNameSingular}} := &{{$tableNameSingular}}{}
-	if err = randomize.Struct(seed, {{$varNameSingular}}, {{$varNameSingular}}DBTypes, true, {{$varNameSingular}}ColumnsWithDefault...); err != nil {
+	if err = randomize.Struct(seed, {{$varNameSingular}}, {{$varNameSingular}}DBTypes, true); err != nil {
 		t.Errorf("Unable to randomize {{$tableNameSingular}} struct: %s", err)
 	}
 
@@ -82,12 +82,6 @@ func test{{$tableNamePlural}}SliceUpdateAll(t *testing.T) {
 			{{$varNameSingular}}Columns,
 			{{$varNameSingular}}PrimaryKeyColumns,
 		)
-		{{- if eq .DriverName "mssql"}}
-		fields = strmangle.SetComplement(
-			fields,
-			{{$varNameSingular}}ColumnsWithAuto,
-		)
-		{{- end}}
 	}
 
 	value := reflect.Indirect(reflect.ValueOf({{$varNameSingular}}))
diff --git a/testdata/Dockerfile b/testdata/Dockerfile
deleted file mode 100644
index 034cedc..0000000
--- a/testdata/Dockerfile
+++ /dev/null
@@ -1,35 +0,0 @@
-# This Dockerfile builds the image used for CI/testing.
-FROM ubuntu:16.04
-
-ENV PATH /usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/go/bin:/opt/mssql-tools/bin
-ENV GODIST go1.8.linux-amd64.tar.gz
-
-# Set up locales for sqlcmd (otherwise it breaks)
-RUN locale-gen en_US.UTF-8 \
-    && echo "LC_ALL=en_US.UTF-8" >> /etc/default/locale \
-    && echo "LANG=en_US.UTF-8" >> /etc/default/locale
-
-# Install bootstrap-y tools
-RUN apt-get update \
-    && apt-get install -y apt-transport-https software-properties-common python3-software-properties \
-    && apt-add-repository ppa:git-core/ppa \
-    && apt-get update \
-    && apt-get install -y curl git
-
-# Install database clients
-# MySQL 8.0 is still in development, so we're using 5.7 which is already
-# available in Ubuntu 16.04
-RUN curl https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - \
-    && echo 'deb http://apt.postgresql.org/pub/repos/apt/ xenial-pgdg main' > /etc/apt/sources.list.d/psql.list \
-    && curl https://packages.microsoft.com/keys/microsoft.asc | apt-key add - \
-    && curl https://packages.microsoft.com/config/ubuntu/16.04/prod.list > /etc/apt/sources.list.d/msprod.list \
-    && apt-get update \
-    && env ACCEPT_EULA=Y apt-get install -y git postgresql-client-9.6 mysql-client-5.7 mssql-tools unixodbc-dev
-
-# Install Go
-RUN curl -o $GODIST https://storage.googleapis.com/golang/$GODIST \
-    && rm -rf /usr/local/go \
-    && tar -C /usr/local -xzf $GODIST
-
-RUN go get -u -v github.com/jstemmer/go-junit-report \
-    && mv /root/go/bin/go-junit-report /usr/bin/go-junit-report
diff --git a/testdata/mssql_test_schema.sql b/testdata/mssql_test_schema.sql
deleted file mode 100644
index 0995aa1..0000000
--- a/testdata/mssql_test_schema.sql
+++ /dev/null
@@ -1,439 +0,0 @@
-CREATE TABLE magic
-(
-  id int NOT NULL IDENTITY (1,1) PRIMARY KEY,
-  id_two int NOT NULL,
-  id_three int,
-  bit_zero bit,
-  bit_one bit NULL,
-  bit_two bit NOT NULL,
-  bit_three bit NULL DEFAULT 0,
-  bit_four bit NULL DEFAULT 1,
-  bit_five bit NOT NULL DEFAULT 0,
-  bit_six bit NOT NULL DEFAULT 1,
-  string_zero VARCHAR(1),
-  string_one VARCHAR(1) NULL,
-  string_two VARCHAR(1) NOT NULL,
-  string_three VARCHAR(1) NULL DEFAULT 'a',
-  string_four VARCHAR(1) NOT NULL DEFAULT 'b',
-  string_five VARCHAR(1000),
-  string_six VARCHAR(1000) NULL,
-  string_seven VARCHAR(1000) NOT NULL,
-  string_eight VARCHAR(1000) NULL DEFAULT 'abcdefgh',
-  string_nine VARCHAR(1000) NOT NULL DEFAULT 'abcdefgh',
-  string_ten VARCHAR(1000) NULL DEFAULT '',
-  string_eleven VARCHAR(1000) NOT NULL DEFAULT '',
-  big_int_zero bigint,
-  big_int_one bigint NULL,
-  big_int_two bigint NOT NULL,
-  big_int_three bigint NULL DEFAULT 111111,
-  big_int_four bigint NOT NULL DEFAULT 222222,
-  big_int_five bigint NULL DEFAULT 0,
-  big_int_six bigint NOT NULL DEFAULT 0,
-  int_zero int,
-  int_one int NULL,
-  int_two int NOT NULL,
-  int_three int NULL DEFAULT 333333,
-  int_four int NOT NULL DEFAULT 444444,
-  int_five int NULL DEFAULT 0,
-  int_six int NOT NULL DEFAULT 0,
-  float_zero float,
-  float_one float,
-  float_two float(24),
-  float_three float(24),
-  float_four float(24) NULL,
-  float_five float(24) NOT NULL,
-  float_six float(24) NULL DEFAULT 1.1,
-  float_seven float(24) NOT NULL DEFAULT 1.1,
-  float_eight float(24) NULL DEFAULT 0.0,
-  float_nine float(24) NULL DEFAULT 0.0,
-  bytea_zero binary NOT NULL,
-  bytea_one binary NOT NULL,
-  bytea_two binary NOT NULL,
-  bytea_three binary NOT NULL DEFAULT CONVERT(VARBINARY(MAX),'a'),
-  bytea_four binary NOT NULL DEFAULT CONVERT(VARBINARY(MAX),'b'),
-  bytea_five binary(100) NOT NULL DEFAULT CONVERT(VARBINARY(MAX),'abcdefghabcdefghabcdefgh'),
-  bytea_six binary(100) NOT NULL DEFAULT  CONVERT(VARBINARY(MAX),'hgfedcbahgfedcbahgfedcba'),
-  bytea_seven binary NOT NULL DEFAULT CONVERT(VARBINARY(MAX),''),
-  bytea_eight binary NOT NULL DEFAULT CONVERT(VARBINARY(MAX),''),
-  time_zero timestamp NOT NULL,
-  time_one date,
-  time_eleven date NULL,
-  time_twelve date NOT NULL,
-  time_fifteen date NULL DEFAULT '19990108',
-  time_sixteen date NOT NULL DEFAULT '1999-01-08'
-);
-GO
-
-CREATE TABLE magicest
-(
-  id int NOT NULL IDENTITY (1,1) PRIMARY KEY,
-  kk float NULL,
-  ll float NOT NULL,
-  mm tinyint NULL,
-  nn tinyint NOT NULL,
-  oo bit NULL,
-  pp bit NOT NULL,
-  qq smallint NULL,
-  rr smallint NOT NULL,
-  ss int NULL,
-  tt int NOT NULL,
-  uu bigint NULL,
-  vv bigint NOT NULL,
-  ww float NULL,
-  xx float NOT NULL,
-  yy float NULL,
-  zz float NOT NULL,
-  aaa double precision NULL,
-  bbb double precision NOT NULL,
-  ccc real NULL,
-  ddd real NOT NULL,
-  ggg date NULL,
-  hhh date NOT NULL,
-  iii datetime NULL,
-  jjj datetime NOT NULL,
-  kkk timestamp NOT NULL,
-  mmm binary NOT NULL,
-  nnn binary NOT NULL,
-  ooo varbinary(100) NOT NULL,
-  ppp varbinary(100) NOT NULL,
-  qqq varbinary NOT NULL,
-  rrr varbinary NOT NULL,
-  www varbinary(max) NOT NULL,
-  xxx varbinary(max) NOT NULL,
-  yyy varchar(100) NULL,
-  zzz varchar(100) NOT NULL,
-  aaaa char NULL,
-  bbbb char NOT NULL,
-  cccc VARCHAR(MAX) NULL,
-  dddd VARCHAR(MAX) NOT NULL,
-  eeee tinyint NULL,
-  ffff tinyint NOT NULL
-);
-GO
-
-create table owner
-(
-  id int NOT NULL IDENTITY (1,1) PRIMARY KEY,
-  name varchar(255) not null
-);
-GO
-
-create table cats
-(
-  id int NOT NULL IDENTITY (1,1) PRIMARY KEY,
-  name varchar(255) not null,
-  owner_id int
-);
-GO
-
-ALTER TABLE cats ADD CONSTRAINT cats_owner_id_fkey FOREIGN KEY (owner_id) REFERENCES owner(id);
-GO
-
-create table toys
-(
-  id int NOT NULL IDENTITY (1,1) PRIMARY KEY,
-  name varchar(255) not null
-);
-GO
-
-create table cat_toys
-(
-  cat_id int not null references cats (id),
-  toy_id int not null references toys (id),
-  primary key (cat_id, toy_id)
-);
-GO
-
-create table dog_toys
-(
-  dog_id int not null,
-  toy_id int not null,
-  primary key (dog_id, toy_id)
-);
-GO
-
-create table dragon_toys
-(
-  dragon_id varchar(100),
-  toy_id varchar(100),
-  primary key (dragon_id, toy_id)
-);
-GO
-
-create table spider_toys
-(
-  spider_id varchar(100) primary key,
-  name varchar(100)
-);
-GO
-
-create table pals
-(
-  pal varchar(100) primary key,
-  name varchar(100)
-);
-GO
-
-create table friend
-(
-  friend varchar(100) primary key,
-  name varchar(100)
-);
-GO
-
-create table bro
-(
-  bros varchar(100) primary key,
-  name varchar(100)
-);
-GO
-
-create table enemies
-(
-  enemies varchar(100) primary key,
-  name varchar(100)
-);
-GO
-
-create table chocolate
-(
-  dog varchar(100) primary key
-);
-GO
-
-create table waffles
-(
-  cat varchar(100) primary key
-);
-GO
-
-create table tigers
-(
-  id binary primary key,
-  name binary NOT NULL
-);
-GO
-
-create table elephants
-(
-  id binary primary key,
-  name binary not null,
-  tiger_id binary NOT NULL unique
-);
-GO
-
-ALTER TABLE elephants ADD CONSTRAINT elephants_tiger_id_fkey FOREIGN KEY (tiger_id) REFERENCES tigers(id);
-GO
-
-create table wolves
-(
-  id binary primary key,
-  name binary not null,
-  tiger_id binary not null unique
-);
-GO
-
-ALTER TABLE wolves ADD CONSTRAINT wolves_tiger_id_fkey FOREIGN KEY (tiger_id) REFERENCES tigers(id);
-GO
-
-create table ants
-(
-  id binary primary key,
-  name binary not null,
-  tiger_id binary not null
-);
-GO
-
-ALTER TABLE ants ADD CONSTRAINT ants_tiger_id_fkey FOREIGN KEY (tiger_id) REFERENCES tigers(id);
-GO
-
-create table worms
-(
-  id binary primary key,
-  name binary not null,
-  tiger_id binary NOT NULL
-);
-GO
-
-ALTER TABLE worms ADD CONSTRAINT worms_tiger_id_fkey FOREIGN KEY (tiger_id) REFERENCES tigers(id);
-GO
-
-create table byte_pilots
-(
-  id binary primary key not null,
-  name varchar(255)
-);
-GO
-
-create table byte_airports
-(
-  id binary primary key not null,
-  name varchar(255)
-);
-GO
-
-create table byte_languages
-(
-  id binary primary key not null,
-  name varchar(255)
-);
-GO
-
-create table byte_jets
-(
-  id binary primary key not null,
-  name varchar(255),
-  byte_pilot_id binary unique NOT NULL,
-  byte_airport_id binary NOT NULL
-);
-GO
-
-ALTER TABLE byte_jets ADD CONSTRAINT byte_jets_byte_pilot_id_fkey FOREIGN KEY (byte_pilot_id) REFERENCES byte_pilots(id);
-GO
-ALTER TABLE byte_jets ADD CONSTRAINT byte_jets_byte_airport_id_fkey FOREIGN KEY (byte_airport_id) REFERENCES byte_airports(id);
-GO
-
-create table byte_pilot_languages
-(
-  byte_pilot_id binary not null,
-  byte_language_id binary not null
-);
-GO
-
-ALTER TABLE byte_pilot_languages ADD CONSTRAINT byte_pilot_languages_pkey PRIMARY KEY (byte_pilot_id,byte_language_id);
-GO
-
-ALTER TABLE byte_pilot_languages ADD CONSTRAINT byte_pilot_languages_byte_pilot_id_fkey FOREIGN KEY (byte_pilot_id) REFERENCES byte_pilots(id);
-GO
-ALTER TABLE byte_pilot_languages ADD CONSTRAINT byte_pilot_languages_byte_language_id_fkey FOREIGN KEY (byte_language_id) REFERENCES byte_languages(id);
-GO
-
-create table cars
-(
-  id integer not null,
-  name VARCHAR(MAX),
-  primary key (id)
-);
-GO
-
-create table car_cars
-(
-  car_id integer not null,
-  awesome_car_id integer not null,
-  relation VARCHAR(MAX) not null,
-  primary key (car_id, awesome_car_id)
-);
-GO
-
-ALTER TABLE car_cars ADD CONSTRAINT car_id_fkey FOREIGN KEY (car_id) REFERENCES cars(id);
-GO
-ALTER TABLE car_cars ADD CONSTRAINT awesome_car_id_fkey FOREIGN KEY (awesome_car_id) REFERENCES cars(id);
-GO
-
-create table trucks
-(
-  id integer not null,
-  parent_id integer,
-  name VARCHAR(MAX),
-  primary key (id)
-);
-GO
-
-ALTER TABLE trucks ADD CONSTRAINT parent_id_fkey FOREIGN KEY (parent_id) REFERENCES trucks(id);
-GO
-
-CREATE TABLE race
-(
-  id integer PRIMARY KEY NOT NULL,
-  race_date datetime,
-  track VARCHAR(MAX)
-);
-GO
-
-CREATE TABLE race_results
-(
-  id integer PRIMARY KEY NOT NULL,
-  race_id integer,
-  name VARCHAR(MAX)
-);
-GO
-
-ALTER TABLE race_results ADD CONSTRAINT race_id_fkey FOREIGN KEY (race_id) REFERENCES race(id);
-GO
-
-CREATE TABLE race_result_scratchings
-(
-  id integer PRIMARY KEY NOT NULL,
-  results_id integer NOT NULL,
-  name VARCHAR(MAX) NOT NULL
-);
-GO
-
-ALTER TABLE race_result_scratchings ADD CONSTRAINT results_id_fkey FOREIGN KEY (results_id) REFERENCES race_results(id);
-GO
-
-CREATE TABLE pilots
-(
-  id integer NOT NULL,
-  name VARCHAR(MAX) NOT NULL
-);
-GO
-
-ALTER TABLE pilots ADD CONSTRAINT pilot_pkey PRIMARY KEY (id);
-GO
-
-CREATE TABLE jets
-(
-  id integer NOT NULL,
-  pilot_id integer NOT NULL,
-  age integer NOT NULL,
-  name VARCHAR(MAX) NOT NULL,
-  color VARCHAR(MAX) NOT NULL
-);
-GO
-
-ALTER TABLE jets ADD CONSTRAINT jet_pkey PRIMARY KEY (id);
-GO
-ALTER TABLE jets ADD CONSTRAINT pilots_fkey FOREIGN KEY (pilot_id) REFERENCES pilots(id);
-GO
-
-CREATE TABLE languages
-(
-  id integer NOT NULL,
-  language VARCHAR(MAX) NOT NULL
-);
-GO
-
-ALTER TABLE languages ADD CONSTRAINT language_pkey PRIMARY KEY (id);
-GO
-
--- Join table
-CREATE TABLE pilot_languages
-(
-  pilot_id integer NOT NULL,
-  language_id integer NOT NULL,
-  uniqueid uniqueidentifier NOT NULL
-);
-GO
-
--- Composite primary key
-ALTER TABLE pilot_languages ADD CONSTRAINT pilot_language_pkey PRIMARY KEY (pilot_id, language_id);
-GO
-ALTER TABLE pilot_languages ADD CONSTRAINT pilot_language_fkey FOREIGN KEY (pilot_id) REFERENCES pilots(id);
-GO
-ALTER TABLE pilot_languages ADD CONSTRAINT languages_fkey FOREIGN KEY (language_id) REFERENCES languages(id);
-GO
-
-CREATE TABLE powers_of_two
-(
-  vid int NOT NULL IDENTITY(1,1),
-  name varchar(255) NOT NULL DEFAULT '',
-  machine_name varchar(255) NOT NULL,
-  description VARCHAR(MAX),
-  hierarchy tinyint NOT NULL DEFAULT '0',
-  module varchar(255) NOT NULL DEFAULT '',
-  weight int NOT NULL DEFAULT '0',
-  PRIMARY KEY (vid),
-  CONSTRAINT machine_name UNIQUE(machine_name)
-);
-GO
diff --git a/testdata/mysql_test_schema.sql b/testdata/mysql_test_schema.sql
index 7ed866e..423aceb 100644
--- a/testdata/mysql_test_schema.sql
+++ b/testdata/mysql_test_schema.sql
@@ -135,9 +135,7 @@ CREATE TABLE magicest (
   aaaa char NULL,
   bbbb char NOT NULL,
   cccc text NULL,
-  dddd text NOT NULL,
-  eeee tinyint(2) NULL,
-  ffff tinyint(2) NOT NULL
+  dddd text NOT NULL
 );
 
 create table owner (
@@ -240,6 +238,40 @@ create table worms (
   foreign key (tiger_id) references tigers (id)
 );
 
+create table pilots (
+  id   int primary key not null auto_increment,
+  name varchar(255)
+);
+
+create table airports (
+  id   int primary key not null auto_increment,
+  name varchar(255)
+);
+
+create table languages (
+  id   int primary key not null auto_increment,
+  name varchar(255)
+);
+
+create table jets (
+  id         int primary key not null auto_increment,
+  name       varchar(255),
+  pilot_id   integer,
+  airport_id integer,
+
+  foreign key (pilot_id) references pilots (id),
+  foreign key (airport_id) references airports (id)
+);
+
+create table pilot_languages (
+  pilot_id    integer not null,
+  language_id integer not null,
+
+  primary key (pilot_id, language_id),
+  foreign key (pilot_id) references pilots (id),
+  foreign key (language_id) references languages (id)
+);
+
 create table byte_pilots (
   id   binary primary key not null,
   name varchar(255)
@@ -317,51 +349,3 @@ CREATE TABLE race_result_scratchings (
     foreign key (results_id) references race_results(id)
 );
 
-CREATE TABLE pilots (
-  id integer NOT NULL,
-  name text NOT NULL
-);
-
-ALTER TABLE pilots ADD CONSTRAINT pilot_pkey PRIMARY KEY (id);
-
-CREATE TABLE jets (
-  id integer NOT NULL,
-  pilot_id integer NOT NULL,
-  age integer NOT NULL,
-  name text NOT NULL,
-  color text NOT NULL
-);
-
-ALTER TABLE jets ADD CONSTRAINT jet_pkey PRIMARY KEY (id);
-ALTER TABLE jets ADD CONSTRAINT pilots_fkey FOREIGN KEY (pilot_id) REFERENCES pilots(id);
-
-CREATE TABLE languages (
-  id integer NOT NULL,
-  language text NOT NULL
-);
-
-ALTER TABLE languages ADD CONSTRAINT language_pkey PRIMARY KEY (id);
-
--- Join table
-CREATE TABLE pilot_languages (
-  pilot_id integer NOT NULL,
-  language_id integer NOT NULL
-);
-
--- Composite primary key
-ALTER TABLE pilot_languages ADD CONSTRAINT pilot_language_pkey PRIMARY KEY (pilot_id, language_id);
-ALTER TABLE pilot_languages ADD CONSTRAINT pilot_language_fkey FOREIGN KEY (pilot_id) REFERENCES pilots(id);
-ALTER TABLE pilot_languages ADD CONSTRAINT languages_fkey FOREIGN KEY (language_id) REFERENCES languages(id);
-
-CREATE TABLE powers_of_two (
-  vid int(10) unsigned NOT NULL AUTO_INCREMENT, 
-  name varchar(255) NOT NULL DEFAULT '',
-  machine_name varchar(255) NOT NULL DEFAULT '',
-  description longtext,
-  hierarchy tinyint(3) unsigned NOT NULL DEFAULT '0',
-  module varchar(255) NOT NULL DEFAULT '',
-  weight int(11) NOT NULL DEFAULT '0',
-  PRIMARY KEY (vid),
-  UNIQUE KEY machine_name (machine_name),
-  KEY list (weight,name)
-) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8;
diff --git a/testdata/postgres_test_schema.sql b/testdata/postgres_test_schema.sql
index 3e29c74..7a3893e 100644
--- a/testdata/postgres_test_schema.sql
+++ b/testdata/postgres_test_schema.sql
@@ -246,6 +246,7 @@ create table enemies (
   primary key (enemies)
 );
 
+
 create table chocolate (
   dog varchar(100) primary key
 );
@@ -306,16 +307,37 @@ create table worms (
   foreign key (tiger_id) references tigers (id)
 );
 
-create table addresses (
-  id bytea primary key,
-  name bytea null
+create table pilots (
+  id   serial primary key not null,
+  name character varying
 );
 
-create table houses (
-  id bytea primary key,
-  name bytea not null,
-  address_id bytea not null unique,
-  foreign key (address_id) references addresses (id)
+create table airports (
+  id   serial primary key not null,
+  name character varying
+);
+
+create table languages (
+  id   serial primary key not null,
+  name character varying
+);
+
+create table jets (
+  id         serial primary key not null,
+  name       character varying,
+  pilot_id   integer,
+  airport_id integer,
+  foreign key (pilot_id) references pilots (id),
+  foreign key (airport_id) references airports (id)
+);
+
+create table pilot_languages (
+  pilot_id    integer not null,
+  language_id integer not null,
+
+  primary key (pilot_id, language_id),
+  foreign key (pilot_id) references pilots (id),
+  foreign key (language_id) references languages (id)
 );
 
 create table byte_pilots (
@@ -394,41 +416,3 @@ CREATE TABLE race_result_scratchings (
     name text NOT NULL,
     foreign key (results_id) references race_results(id)
 );
-
-CREATE TABLE pilots (
-  id integer NOT NULL,
-  name text NOT NULL
-);
-
-ALTER TABLE pilots ADD CONSTRAINT pilot_pkey PRIMARY KEY (id);
-
-CREATE TABLE jets (
-  id integer NOT NULL,
-  pilot_id integer NOT NULL,
-  age integer NOT NULL,
-  name text NOT NULL,
-  color text NOT NULL
-);
-
-ALTER TABLE jets ADD CONSTRAINT jet_pkey PRIMARY KEY (id);
--- The following fkey remains poorly named to avoid regressions related to psql naming
-ALTER TABLE jets ADD CONSTRAINT pilots_fkey FOREIGN KEY (pilot_id) REFERENCES pilots(id);
-
-CREATE TABLE languages (
-  id integer NOT NULL,
-  language text NOT NULL
-);
-
-ALTER TABLE languages ADD CONSTRAINT language_pkey PRIMARY KEY (id);
-
--- Join table
-CREATE TABLE pilot_languages (
-  pilot_id integer NOT NULL,
-  language_id integer NOT NULL
-);
-
--- Composite primary key
-ALTER TABLE pilot_languages ADD CONSTRAINT pilot_language_pkey PRIMARY KEY (pilot_id, language_id);
--- The following fkey remains poorly named to avoid regressions related to psql naming
-ALTER TABLE pilot_languages ADD CONSTRAINT pilots_fkey FOREIGN KEY (pilot_id) REFERENCES pilots(id);
-ALTER TABLE pilot_languages ADD CONSTRAINT languages_fkey FOREIGN KEY (language_id) REFERENCES languages(id);
diff --git a/boilingcore/text_helpers.go b/text_helpers.go
similarity index 98%
rename from boilingcore/text_helpers.go
rename to text_helpers.go
index 0bce73f..56451e6 100644
--- a/boilingcore/text_helpers.go
+++ b/text_helpers.go
@@ -1,11 +1,11 @@
-package boilingcore
+package main
 
 import (
 	"fmt"
 	"strings"
 
-	"github.com/lbryio/sqlboiler/bdb"
-	"github.com/lbryio/sqlboiler/strmangle"
+	"github.com/vattle/sqlboiler/bdb"
+	"github.com/vattle/sqlboiler/strmangle"
 )
 
 // TxtToOne contains text that will be used by templates for a one-to-many or
diff --git a/boilingcore/text_helpers_test.go b/text_helpers_test.go
similarity index 98%
rename from boilingcore/text_helpers_test.go
rename to text_helpers_test.go
index 835ed0d..b203dd9 100644
--- a/boilingcore/text_helpers_test.go
+++ b/text_helpers_test.go
@@ -1,12 +1,12 @@
-package boilingcore
+package main
 
 import (
 	"reflect"
 	"testing"
 
 	"github.com/davecgh/go-spew/spew"
-	"github.com/lbryio/sqlboiler/bdb"
-	"github.com/lbryio/sqlboiler/bdb/drivers"
+	"github.com/vattle/sqlboiler/bdb"
+	"github.com/vattle/sqlboiler/bdb/drivers"
 )
 
 func TestTxtsFromOne(t *testing.T) {