Compare commits: fix-exists ... master (192 commits)
Commits (SHA1):

6b4e052bed, 3db4f30f56, 256a6d4225, 3f035a9fe2, c01b182839, e3fe976c3c, 4e1b83ab39, f892107dad,
fadcbfa8b6, 29172e976b, eea3d349a7, d180a095ca, 3baa9e72ca, bc08aa6160, 466f2d5b2c, 45ee5c902f,
8d4055e3eb, 396f42bc91, 77fc991e7b, e4a52e21b6, 0b0a1f21c2, cd445bf2f4, 55f42bc038, 544ff7afdd,
faec346481, 3abac13aeb, 8837a986ee, ce9d13abf0, ca9f47de8b, e339812027, 23f245776d, 99a3a1d091,
9c8262b702, 05c7f7d06a, ed423a3606, 09c585cdb1, 912a689701, 31fe8b6e1d, b160e5c1f4, ed43c9078f,
f863ecb48e, 451723ccb9, 687b0506db, 1467b88f04, 070df18197, 3b5ab423b3, 632b89fae0, 112a836af2,
7a8d78cceb, ecad5334fb, 196b4099a1, a3442a5ab4, a9264e6447, bdd28d9d5b, 7de817b111, 0b44c26a26,
36ae141304, 85c9104d34, 5ee4b06c9b, 3a38ed1f7a, e4d1e606fd, 6a0817d37d, 5a33894412, d9b991e487,
91a798d9af, 0818af0e26, 10cfe74989, d13410617f, 91950e711e, 3d1f3fc609, 4560da9bd1, fd2c7ad764,
b2acda8ade, f913d5e791, 178d925c24, 783e7a82b3, b6d0a5142a, e1dfd0bb1e, aca431b3ce, f94fa547e7,
a4ba4b6e9d, dfbcef7fe3, 874d2ed2e6, dbf897977a, b10df110a0, b4fc4447f8, 83b935168f, 8ef33a1127,
f4b9b2a406, 9bafa2f158, 6bc6b1690c, 13bdda4e20, 68ac8a3c34, d40d074320, 12dce9d986, a508530f63,
49d052af53, e6c6056353, ff5957e4e8, ccb0c9f6c8, 97e32c21be, 7162e9e927, 8874738f73, 21917fcfc6,
f3f8074833, 4718e72b6d, 35eefdda41, 79f6c344c1, 5bc7addb8a, 2e9535d56e, b1b0249898, 8de60ee26a,
e943c37a99, 0a50c90a58, 4c2062caee, f45f98ef03, ad15668462, bfab02bde1, 54f960659b, 6fad1bd148,
705befef07, 85fc6e546a, 6aadf439c9, 51e9961f0a, 5d72569635, 813fd43e03, a48e5f7f82, e2e619dfb2,
0bdbee86f1, 4f02886c2b, b783311248, e3254c1851, aa01f21b5c, ed6dad7eed, 83d90f3f28, 454a3a816a,
b3c4580efa, cbdbe4891a, e2f32e746e, b4c9af72e7, 2d3983b992, 54af37ef9a, 132339026a, e00ea60679,
e8723a2797, 60a40eaf48, 9067237f32, 8f96ba35d6, a5cb765403, 8aa4e2148c, 3747754236, e2c53aa312,
dea061f571, e70ed0d070, 60814eed1f, acaf94ef5f, 10ee452032, 59c238539d, 0221da2491, 671e1d7ae9,
ec28cb4148, c02a2f0c20, d79f816cc1, 936df65ebc, bdb0ef1b7c, c33bb479a1, e992e327c2, 761efee9f0,
bfba60eaad, cb54ded207, faf511a7e6, 4506da6b41, 788d28400e, 88cde8df0c, ab01c9d234, aab85fd2cd,
61ab7e254a, 4997ba96bf, 711ecbbe8d, fac1a7fe69, be2131c763, 158d9cbec0, 22f7a45847, 4ae9336538,
14b9122a08, 5f7bee14a0, 23b6221f8b, dd73656d8e, 920cb42f9b, dea748d409, 5449ce7c6c, 94a6604d19
89 changed files with 4280 additions and 1161 deletions

.circleci/config.yml (new file, 208 lines)

```yaml
version: 2
jobs:
  build:
    working_directory: /root
    docker:
      - image: aarondl0/sqlboiler-test:latest

      - image: postgres:9.6
        environment:
          POSTGRES_PASSWORD: psqlpassword

      - image: mysql:5.7
        environment:
          MYSQL_ROOT_PASSWORD: mysqlpassword

      - image: microsoft/mssql-server-linux:ctp1-4
        environment:
          ACCEPT_EULA: 'Y'
          SA_PASSWORD: 'R@@tr@@t1234'

    environment:
      GOPATH: /go
      ROOTPATH: /go/src/github.com/vattle/sqlboiler

    steps:
      - run:
          name: Add PSQL Creds
          command: |
            echo "*:*:*:*:psqlpassword" > /root/.pgpass
            chmod 600 /root/.pgpass
      - run:
          name: Add MySQL Creds
          command: |
            echo -e "[client]\nuser = root\npassword = mysqlpassword\nhost = localhost\nprotocol = tcp" > /root/.my.cnf
            chmod 600 /root/.my.cnf

      - run:
          name: Wait for PSQL
          command: >
            for i in `seq 30`; do
              echo "Waiting for psql"
              set +o errexit
              psql --host localhost --username postgres --dbname template1 -c 'select * from information_schema.tables;' > /dev/null
              status=$?
              set -o errexit
              if [ $status -eq 0 ]; then
                break
              fi
              if [ $i -eq 30 ]; then
                echo "Failed to wait for psql"
                exit 1
              fi
              sleep 1
            done

      - run:
          name: Wait for MySQL
          command: >
            for i in `seq 30`; do
              echo "Waiting for mysql"
              set +o errexit
              mysql --execute 'select * from information_schema.tables;' > /dev/null
              status=$?
              set -o errexit
              if [ $status -eq 0 ]; then
                break
              fi
              if [ $i -eq 30 ]; then
                echo "Failed to wait for mysql"
                exit 1
              fi
              sleep 1
            done

      - run:
          name: Wait for MSSQL
          command: >
            for i in `seq 30`; do
              echo "Waiting for mssql"
              set +o errexit
              sqlcmd -H localhost -U sa -P R@@tr@@t1234 -Q "select * from information_schema.tables;" > /dev/null
              status=$?
              set -o errexit
              if [ $status -eq 0 ]; then
                break
              fi
              if [ $i -eq 30 ]; then
                echo "Failed to wait for mssql"
                exit 1
              fi
              sleep 1
            done

      - run:
          name: Make GOPATH
          command: mkdir -p /go/src/github.com/vattle/sqlboiler

      - checkout:
          path: /go/src/github.com/vattle/sqlboiler

      - run:
          name: Create PSQL DB
          command: |
            createdb --host localhost --username postgres --owner postgres sqlboiler
            psql --host localhost --username postgres --dbname sqlboiler < $ROOTPATH/testdata/postgres_test_schema.sql
      - run:
          name: Create MySQL DB
          command: |
            mysql --host localhost --execute 'create database sqlboiler;'
            mysql --host localhost --database sqlboiler < $ROOTPATH/testdata/mysql_test_schema.sql
      - run:
          name: Create MSSQL DB
          command: |
            sqlcmd -S localhost -U sa -P R@@tr@@t1234 -Q "create database sqlboiler;"
            sqlcmd -S localhost -U sa -P R@@tr@@t1234 -d sqlboiler -i $ROOTPATH/testdata/mssql_test_schema.sql

      - run:
          name: Build SQLBoiler
          command: |
            cd $ROOTPATH; go get -v -t
            cd $ROOTPATH; go build -v github.com/vattle/sqlboiler

      - run:
          name: 'Configure SQLBoiler: PSQL'
          command: echo -e '[postgres]\nhost="localhost"\nport=5432\nuser="postgres"\npass="psqlpassword"\ndbname="sqlboiler"\nsslmode="disable"\n' > $ROOTPATH/sqlboiler.toml
      - run:
          name: 'Configure SQLBoiler: MySQL'
          command: echo -e '[mysql]\nhost="localhost"\nport=3306\nuser="root"\npass="mysqlpassword"\ndbname="sqlboiler"\nsslmode="false"\n' >> $ROOTPATH/sqlboiler.toml
      - run:
          name: 'Configure SQLBoiler: MSSQL'
          command: echo -e '[mssql]\nhost="localhost"\nport=1433\nuser="sa"\npass="R@@tr@@t1234"\ndbname="sqlboiler"\nsslmode="disable"\n' >> $ROOTPATH/sqlboiler.toml

      - run:
          name: 'Generate: PSQL'
          command: cd $ROOTPATH; ./sqlboiler -o postgres postgres
      - run:
          name: 'Generate: MySQL'
          command: cd $ROOTPATH; ./sqlboiler -o mysql mysql
      - run:
          name: 'Generate: MSSQL'
          command: cd $ROOTPATH; ./sqlboiler -o mssql mssql

      - run:
          name: Download generated and test deps
          command: |
            cd $ROOTPATH
            go get -v -t ./...

      - run:
          name: Run Tests
          command: |
            cd $ROOTPATH
            cp ./testdata/mssql_test_schema.sql mssql/tables_schema.sql
            go test -v -race ./... | tee test_out.txt

      - run:
          name: Convert test output to JUNIT
          command: |
            mkdir -p $HOME/test_results/go
            cat $ROOTPATH/test_out.txt | go-junit-report > $HOME/test_results/go/out.xml

      - store_test_results:
          path: test_results

#test:
#  pre:
#    - echo -e "[postgres]\nhost=\"localhost\"\nport=5432\nuser=\"ubuntu\"\ndbname=\"sqlboiler\"\n" > sqlboiler.toml
#    - createdb -U ubuntu sqlboiler
#    - psql -U ubuntu sqlboiler < ./testdata/postgres_test_schema.sql
#
#    - echo -e "[mysql]\nhost=\"localhost\"\nport=3306\nuser=\"ubuntu\"\ndbname=\"sqlboiler\"\nsslmode=\"false\"\n" >> sqlboiler.toml
#    - echo "create database sqlboiler;" | mysql -u ubuntu
#    - mysql -u ubuntu sqlboiler < ./testdata/mysql_test_schema.sql
#
#    - echo -e "[mssql]\nhost=\"localhost\"\nport=1433\nuser=\"sa\"\ndbname=\"sqlboiler\"\nsslmode=\"disable\"\n" >> sqlboiler.toml
#    - docker run -e 'ACCEPT_EULA=Y' -e 'SA_PASSWORD=R@@tr@@t1234' -p 1433:1433 -d --name mssql microsoft/mssql-server-linux
#    - sqlcmd -S localhost -U sa -P R@@tr@@t1234 -Q "create database sqlboiler;"
#    - sqlcmd -S localhost -U sa -P R@@tr@@t1234 -d sqlboiler -i ./testdata/mssql_test_schema.sql
#
#    - ./sqlboiler -o postgres postgres
#    - ./sqlboiler -o mysql mysql
#    - ./sqlboiler -o mssql mssql
#    - cp ./testdata/mssql_test_schema.sql mssql/tables_schema.sql
#  override:
#    - go test -v -race ./... > $CIRCLE_ARTIFACTS/gotest.txt
#  post:
#    - cat $CIRCLE_ARTIFACTS/gotest.txt | go-junit-report > $CIRCLE_TEST_REPORTS/junit.xml
#
#machine:
#  environment:
#    GODIST: go1.7.linux-amd64.tar.gz
#    PATH: /home/ubuntu/.go_workspace/bin:/usr/local/go/bin:/home/ubuntu/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:/opt/mssql-tools/bin
#  post:
#    - mkdir -p download
#    - test -e download/$GODIST || curl -o download/$GODIST https://storage.googleapis.com/golang/$GODIST
#    - sudo rm -rf /usr/local/go
#    - sudo tar -C /usr/local -xzf download/$GODIST
#
#dependencies:
#  pre:
#    - mkdir -p /home/ubuntu/.go_workspace/src/github.com/jstemmer
#    - go get -u github.com/jstemmer/go-junit-report
#
#    - curl https://packages.microsoft.com/keys/microsoft.asc | sudo apt-key add -
#    - curl https://packages.microsoft.com/config/ubuntu/14.04/prod.list | sudo tee /etc/apt/sources.list.d/msprod.list
#    - sudo apt-get update; sudo apt-get install mssql-tools unixodbc-dev
#    - docker pull microsoft/mssql-server-linux
#  cache_directories:
#    - ~/download
```

.github/ISSUE_TEMPLATE.md (new file, 18 lines, vendored)

If you're having a generation problem please answer these questions before submitting your issue. Thanks!

### What version of SQLBoiler are you using (`sqlboiler --version`)?


### If this happened at generation time what was the full SQLBoiler command you used to generate your models? (if not applicable leave blank)


### If this happened at runtime what code produced the issue? (if not applicable leave blank)


### What is the output of the command above with the `-d` flag added to it? (Provided you are comfortable sharing this, it contains a blueprint of your schema)


### Please provide a relevant database schema so we can replicate your issue (Provided you are comfortable sharing this)


### Further information. What did you do, what did you expect?

.gitignore (vendored, 1 line added)

@@ -4,3 +4,4 @@ sqlboiler.toml
 models/
 testschema.sql
 .cover
+/.idea

CONTRIBUTING.md (new file, 60 lines)

# Contributing

Thanks for your interest in contributing to SQLBoiler!

We have a very lightweight process and aim to keep it that way.
Read the sections for the piece you're interested in and go from
there.

If you need quick communication we're usually on [Slack](https://sqlboiler.from-the.cloud).

# New Code / Features

## Small Change

#### TLDR

1. Open a PR against the **dev** branch with an explanation
1. Participate in the GitHub code review

#### Long version

For code that requires little to no discussion, please just open a pull request with some
explanation against the **dev** branch. All code goes through dev before going out in a release.

## Bigger Change

#### TLDR

1. Start a proposal of the idea in a GitHub issue
1. After design consensus, open a PR with the work against the **dev** branch
1. Participate in the GitHub code review

#### Long version

If however you're working on something bigger, it's usually better to check with us on the idea
before starting on a pull request, just so there's no time wasted in redoing/refactoring or being
outright rejected because the PR is at odds with the design. The best way to accomplish this is to
open an issue to discuss it. It can always start as a Slack conversation but should eventually end
up as an issue to avoid penalizing the rest of the users for not being on Slack. Once we agree on
the way to do something, then open the PR against the **dev** branch and we'll commence code review
with the GitHub code review tools. Then it will be merged into dev, and later go out in a release.

# Bugs

Issues should be filed on GitHub; simply use the template provided and fill in the detail. If there's
more information you feel you should give, use your best judgment and add it in; the more the better.
See the section below for information on providing database schemas.

Bugs that have responses from contributors but no action from those who opened them after a time
will be closed with the comment: "Stale".

## Schemas

A database schema can help us fix generation issues very quickly. However, not everyone is willing to part
with their database schema for various reasons and that's fine. Instead of providing the schema, please
provide a subset of your database (you can munge the names so as to be unrecognizable) that can
help us reproduce the problem.

*Note:* Your schema information is included in the output from `--debug`, so be careful giving this
information out publicly on a GitHub issue if you're sensitive about this.

README.md (274 lines changed)

@@ -1,7 +1,10 @@
-# SQLBoiler
+![sqlboiler logo](http://i.imgur.com/NJtCT7y.png)
 
 [![License](https://img.shields.io/badge/license-BSD-blue.svg)](https://github.com/vattle/sqlboiler/blob/master/LICENSE)
 [![GoDoc](https://godoc.org/github.com/vattle/sqlboiler?status.svg)](https://godoc.org/github.com/vattle/sqlboiler)
+[![Mail](https://img.shields.io/badge/mail%20list-sqlboiler-lightgrey.svg)](https://groups.google.com/a/volatile.tech/forum/#!forum/sqlboiler)
+[![Mail-Annc](https://img.shields.io/badge/mail%20list-sqlboiler--announce-lightgrey.svg)](https://groups.google.com/a/volatile.tech/forum/#!forum/sqlboiler-announce)
+[![Slack](https://img.shields.io/badge/slack-%23general-lightgrey.svg)](https://sqlboiler.from-the.cloud)
 [![CircleCI](https://circleci.com/gh/vattle/sqlboiler.svg?style=shield)](https://circleci.com/gh/vattle/sqlboiler)
 [![Go Report Card](https://goreportcard.com/badge/vattle/sqlboiler)](http://goreportcard.com/report/vattle/sqlboiler)
@@ -48,6 +51,8 @@ Table of Contents
 * [Download](#download)
 * [Configuration](#configuration)
 * [Initial Generation](#initial-generation)
+* [Regeneration](#regeneration)
+* [Extending Generated Models](#extending-generated-models)
 * [Diagnosing Problems](#diagnosing-problems)
 * [Features & Examples](#features--examples)
 * [Automatic CreatedAt/UpdatedAt](#automatic-createdatupdatedat)
@@ -104,9 +109,12 @@ Table of Contents
 - PostgreSQL
 - MySQL
+- Microsoft SQL Server
 
 *Note: Seeking contributors for other database engines.*
 
+*Microsoft SQL Server: Limit with offset support only for SQL Server 2012 and above.*
+
 ### A Small Taste
 
 For a comprehensive list of available operations and examples please see [Features & Examples](#features--examples).
@@ -256,13 +264,14 @@ not to pass them through the command line or environment variables:
 Example:
 
 ```toml
+blacklist=["migrations", "other"]
+schema="myschema"
 [postgres]
 dbname="dbname"
 host="localhost"
 port=5432
 user="dbusername"
 pass="dbpassword"
 
 [mysql]
 dbname="dbname"
 host="localhost"
@@ -270,6 +279,13 @@ port=3306
 user="dbusername"
 pass="dbpassword"
 sslmode="false"
+
+[mssql]
+dbname="dbname"
+host="localhost"
+port=1433
+user="dbusername"
+pass="dbpassword"
+sslmode="disable"
 ```
 
 #### Initial Generation
@@ -316,6 +332,119 @@ sqlboiler -b goose_migrations postgres
 go test ./models
 ```
 
+*Note: No `mysqldump` or `pg_dump` equivalent for Microsoft SQL Server, so generated tests must be supplemented by `tables_schema.sql` with `CREATE TABLE ...` queries*
+
+You can use `go generate` for SQLBoiler if you want to make it easy to
+run the command.
+
+It's important to not modify anything in the output folder, which brings us to
+the next topic: regeneration.
+
+#### Regeneration
+
+When regenerating the models it's recommended that you completely delete the
+generated directory in a build script or use the `--wipe` flag in SQLBoiler.
+The reasons for this are that sqlboiler doesn't try to diff your files in any
+smart way, it simply writes the files it's going to write whether they're there
+or not and doesn't delete any files that were added by you or previous runs of
+SQLBoiler. In the best case this can cause compilation errors, in the worst case
+this may leave extraneous and unusable code that was generated against tables
+that are no longer in the database.
+
+The bottom line is that this tool should always produce the same result from
+the same source. And the intention is to always regenerate from a pure state.
+The only reason the `--wipe` flag isn't defaulted to on is because we don't
+like programs that `rm -rf` things on the filesystem without being asked to.
+
+#### Extending generated models
+
+There will probably come a time when you want to extend the generated models
+with some kinds of helper functions. A general guideline is to put your
+extension functions into a separate package so that your functions aren't
+accidentally deleted when regenerating. Past that there are 3 main ways to
+extend the models, the first way is the most desirable:
+
+**Method 1: Simple Functions**
+
+```go
+// Package modext is for SQLBoiler helper methods
+package modext
+
+// UserFirstTimeSetup is an extension of the user model.
+func UserFirstTimeSetup(db *sql.DB, u *models.User) error { ... }
+```
+
+Code organization is accomplished by using multiple files, and everything
+is passed as a parameter so these kinds of methods are very easy to test.
+
+Calling code is also very straightforward:
+
+```go
+user, err := Users(db).One()
+// elided error check
+
+err = modext.UserFirstTimeSetup(db, user)
+// elided error check
+```
+
+**Method 2: Empty struct methods**
+
+The above is the best way to code extensions for SQLBoiler, however there may
+be times when the number of methods grows too large and code completion is
+not as helpful anymore. In these cases you may consider structuring the code
+like this:
+
+```go
+// Package modext is for SQLBoiler helper methods
+package modext
+
+type users struct {}
+
+var Users = users{}
+
+// FirstTimeSetup is an extension of the user model.
+func (u users) FirstTimeSetup(db *sql.DB, u *models.User) error { ... }
+```
+
+Calling code then looks a little bit different:
+
+```go
+user, err := Users(db).One()
+// elided error check
+
+err = modext.Users.FirstTimeSetup(db, user)
+// elided error check
+```
+
+This is almost identical to the method above, but gives slight amounts more
+organization at virtually no cost at runtime. It is however not as desirable
+as the first method since it does have some runtime cost and doesn't offer that
+much benefit over it.
+
+**Method 3: Embedding**
+
+This pattern is not for the faint of heart, what it provides in benefits it
+more than makes up for in downsides. It's possible to embed the SQLBoiler
+structs inside your own to enhance them. However it's subject to easy breakages
+and a dependency on these additional objects. It can also introduce
+inconsistencies as some objects may have no extended functionality and therefore
+have no reason to be embedded so you either have to have a struct for each
+generated struct even if it's empty, or have inconsistencies, some places where
+you use the enhanced model, and some where you do not.
+
+```go
+user, err := Users(db).One()
+// elided error check
+
+enhUser := modext.User{user}
+err = enhUser.FirstTimeSetup(db)
+// elided error check
+```
+
+I don't recommend this pattern, but included it so that people know it's an
+option and also know the problems with it.
+
 ## Diagnosing Problems
 
 The most common causes of problems and panics are:
@@ -341,7 +470,7 @@ Most examples in this section will be demonstrated using the following Postgres
 ```sql
 CREATE TABLE pilots (
   id integer NOT NULL,
-  name text NOT NULL,
+  name text NOT NULL
 );
 
 ALTER TABLE pilots ADD CONSTRAINT pilot_pkey PRIMARY KEY (id);
@@ -351,11 +480,11 @@ CREATE TABLE jets (
   pilot_id integer NOT NULL,
   age integer NOT NULL,
   name text NOT NULL,
-  color text NOT NULL,
+  color text NOT NULL
 );
 
 ALTER TABLE jets ADD CONSTRAINT jet_pkey PRIMARY KEY (id);
-ALTER TABLE jets ADD CONSTRAINT pilots_fkey FOREIGN KEY (pilot_id) REFERENCES pilots(id);
+ALTER TABLE jets ADD CONSTRAINT jet_pilots_fkey FOREIGN KEY (pilot_id) REFERENCES pilots(id);
 
 CREATE TABLE languages (
   id integer NOT NULL,
@@ -372,8 +501,8 @@ CREATE TABLE pilot_languages (
 
 -- Composite primary key
 ALTER TABLE pilot_languages ADD CONSTRAINT pilot_language_pkey PRIMARY KEY (pilot_id, language_id);
-ALTER TABLE pilot_languages ADD CONSTRAINT pilots_fkey FOREIGN KEY (pilot_id) REFERENCES pilots(id);
+ALTER TABLE pilot_languages ADD CONSTRAINT pilot_language_pilots_fkey FOREIGN KEY (pilot_id) REFERENCES pilots(id);
-ALTER TABLE pilot_languages ADD CONSTRAINT languages_fkey FOREIGN KEY (language_id) REFERENCES languages(id);
+ALTER TABLE pilot_languages ADD CONSTRAINT pilot_language_languages_fkey FOREIGN KEY (language_id) REFERENCES languages(id);
 ```
 
 The generated model structs for this schema look like the following. Note that we've included the relationship
@@ -557,6 +686,9 @@ err := pilot.Delete(db) // Regular variant, takes a db handle (boil.Executor int
 pilot.DeleteP(db)      // Panic variant, takes a db handle and panics on error.
 err := pilot.DeleteG() // Global variant, uses the globally set db handle (boil.SetDB()).
 pilot.DeleteGP()       // Global&Panic variant, combines the global db handle and panic on error.
+
+db.Begin()             // Normal sql package way of creating a transaction
+boil.Begin()           // Uses the global database handle set by boil.SetDB()
 ```
 
 Note that it's slightly different for query building.
@@ -824,15 +956,16 @@ it with the `AddModelHook` method. Here is an example of a before insert hook:
 
 ```go
 // Define my hook function
-func myHook(exec boil.Executor, p *Pilot) {
+func myHook(exec boil.Executor, p *Pilot) error {
   // Do stuff
+  return nil
 }
 
 // Register my before insert hook for pilots
 models.AddPilotHook(boil.BeforeInsertHook, myHook)
 ```
 
-Your `ModelHook` will always be defined as `func(boil.Executor, *Model)`
+Your `ModelHook` will always be defined as `func(boil.Executor, *Model) error`
 
 ### Transactions
@@ -856,6 +989,10 @@ tx.Commit()
 tx.Rollback()
 ```
 
+It's also worth noting that there's a way to take advantage of `boil.SetDB()`
+by using the [boil.Begin()](https://godoc.org/github.com/vattle/sqlboiler/boil#Begin) function.
+This opens a transaction using the globally stored database.
+
 ### Debug Logging
 
 Debug logging will print your generated SQL statement and the arguments it is using.
@@ -1132,51 +1269,78 @@ generator is located at: https://github.com/vattle/sqlboiler
 
 If you'd like to run the benchmarks yourself check out our [boilbench](https://github.com/vattle/boilbench) repo.
 
-Here are the results (lower is better):
-
-`go test -bench . -benchmem`
-
-```
-BenchmarkGORMDelete/gorm-8 100000 15364 ns/op 5395 B/op 113 allocs/op
-BenchmarkGORPDelete/gorp-8 1000000 1703 ns/op 304 B/op 12 allocs/op
-BenchmarkXORMDelete/xorm-8 100000 14733 ns/op 3634 B/op 107 allocs/op
-BenchmarkBoilDelete/boil-8 2000000 986 ns/op 120 B/op 7 allocs/op
-
-BenchmarkGORMInsert/gorm-8 100000 19197 ns/op 8054 B/op 161 allocs/op
-BenchmarkGORPInsert/gorp-8 500000 3413 ns/op 1008 B/op 32 allocs/op
-BenchmarkXORMInsert/xorm-8 100000 15428 ns/op 5836 B/op 131 allocs/op
-BenchmarkBoilInsert/boil-8 500000 3041 ns/op 568 B/op 21 allocs/op
-
-BenchmarkGORMSelectAll/gorm-8 20000 85422 ns/op 29912 B/op 511 allocs/op
-BenchmarkGORPSelectAll/gorp-8 50000 35824 ns/op 8837 B/op 312 allocs/op
-BenchmarkXORMSelectAll/xorm-8 30000 58843 ns/op 13805 B/op 298 allocs/op
-BenchmarkBoilSelectAll/boil-8 100000 13844 ns/op 2840 B/op 61 allocs/op
-
-BenchmarkGORMSelectSubset/gorm-8 10000 100714 ns/op 30875 B/op 517 allocs/op
-BenchmarkGORPSelectSubset/gorp-8 30000 43547 ns/op 8837 B/op 312 allocs/op
-BenchmarkXORMSelectSubset/xorm-8 30000 48128 ns/op 12989 B/op 282 allocs/op
-BenchmarkBoilSelectSubset/boil-8 100000 12316 ns/op 2977 B/op 65 allocs/op
-
-BenchmarkGORMSelectComplex/gorm-8 10000 133598 ns/op 49398 B/op 772 allocs/op
-BenchmarkGORPSelectComplex/gorp-8 50000 40588 ns/op 9037 B/op 321 allocs/op
-BenchmarkXORMSelectComplex/xorm-8 30000 56367 ns/op 14174 B/op 313 allocs/op
-BenchmarkBoilSelectComplex/boil-8 100000 16941 ns/op 3821 B/op 95 allocs/op
-
-BenchmarkGORMUpdate/gorm-8 50000 25406 ns/op 9710 B/op 195 allocs/op
-BenchmarkGORPUpdate/gorp-8 300000 3614 ns/op 1152 B/op 34 allocs/op
-BenchmarkXORMUpdate/xorm-8 100000 17510 ns/op 4458 B/op 132 allocs/op
-BenchmarkBoilUpdate/boil-8 500000 2958 ns/op 520 B/op 16 allocs/op
-
-BenchmarkGORMRawBind/gorm-8 10000 112577 ns/op 38270 B/op 595 allocs/op
-BenchmarkGORPRawBind/gorp-8 30000 40967 ns/op 8837 B/op 312 allocs/op
-BenchmarkXORMRawBind/xorm-8 30000 54739 ns/op 12692 B/op 273 allocs/op
-BenchmarkSQLXRawBind/sqlx-8 200000 13537 ns/op 4268 B/op 49 allocs/op
-BenchmarkBoilRawBind/boil-8 200000 11144 ns/op 4334 B/op 49 allocs/op
-```
-
-<img style="margin-right:6px;" src="http://i.imgur.com/TglZGoI.png"/>
-<img style="margin-right:6px;" src="http://i.imgur.com/Ktm2ta4.png"/>
-<img style="margin-right:6px;" src="http://i.imgur.com/yv8kFPA.png"/>
-<img style="margin-right:6px;" src="http://i.imgur.com/890Zswe.png"/>
-<img style="margin-right:6px;" src="http://i.imgur.com/qMgoAFJ.png"/>
-<img style="margin-right:6px;" src="http://i.imgur.com/sDoNiCN.png"/>
-<img style="margin-right:6px;" src="http://i.imgur.com/EvUa4UT.png"/>
+```bash
+go test -bench . -benchmem
+```
+
+### Results (lower is better)
+
+Test machine:
+```text
+OS: Ubuntu 16.04
+CPU: Intel(R) Core(TM) i7-4771 CPU @ 3.50GHz
+Mem: 16GB
+Go: go version go1.8.1 linux/amd64
+```
+
+The graphs below have many runs like this as input to calculate errors. Here
+is a sample run:
+
+```text
+BenchmarkGORMSelectAll/gorm-8 20000 66500 ns/op 28998 B/op 455 allocs/op
+BenchmarkGORPSelectAll/gorp-8 50000 31305 ns/op 9141 B/op 318 allocs/op
+BenchmarkXORMSelectAll/xorm-8 20000 66074 ns/op 16317 B/op 417 allocs/op
+BenchmarkKallaxSelectAll/kallax-8 100000 18278 ns/op 7428 B/op 145 allocs/op
+BenchmarkBoilSelectAll/boil-8 100000 12759 ns/op 3145 B/op 67 allocs/op
+
+BenchmarkGORMSelectSubset/gorm-8 20000 69469 ns/op 30008 B/op 462 allocs/op
+BenchmarkGORPSelectSubset/gorp-8 50000 31102 ns/op 9141 B/op 318 allocs/op
+BenchmarkXORMSelectSubset/xorm-8 20000 64151 ns/op 15933 B/op 414 allocs/op
+BenchmarkKallaxSelectSubset/kallax-8 100000 16996 ns/op 6499 B/op 132 allocs/op
+BenchmarkBoilSelectSubset/boil-8 100000 13579 ns/op 3281 B/op 71 allocs/op
+
+BenchmarkGORMSelectComplex/gorm-8 20000 76284 ns/op 34566 B/op 521 allocs/op
+BenchmarkGORPSelectComplex/gorp-8 50000 31886 ns/op 9501 B/op 328 allocs/op
+BenchmarkXORMSelectComplex/xorm-8 20000 68430 ns/op 17694 B/op 464 allocs/op
+BenchmarkKallaxSelectComplex/kallax-8 50000 26095 ns/op 10293 B/op 212 allocs/op
+BenchmarkBoilSelectComplex/boil-8 100000 16403 ns/op 4205 B/op 102 allocs/op
+
+BenchmarkGORMDelete/gorm-8 200000 10356 ns/op 5059 B/op 98 allocs/op
+BenchmarkGORPDelete/gorp-8 1000000 1335 ns/op 352 B/op 13 allocs/op
+BenchmarkXORMDelete/xorm-8 200000 10796 ns/op 4146 B/op 122 allocs/op
+BenchmarkKallaxDelete/kallax-8 300000 5141 ns/op 2241 B/op 48 allocs/op
+BenchmarkBoilDelete/boil-8 2000000 796 ns/op 168 B/op 8 allocs/op
+
+BenchmarkGORMInsert/gorm-8 100000 15238 ns/op 8278 B/op 150 allocs/op
+BenchmarkGORPInsert/gorp-8 300000 4648 ns/op 1616 B/op 38 allocs/op
+BenchmarkXORMInsert/xorm-8 100000 12600 ns/op 6092 B/op 138 allocs/op
+BenchmarkKallaxInsert/kallax-8 100000 15115 ns/op 6003 B/op 126 allocs/op
+BenchmarkBoilInsert/boil-8 1000000 2249 ns/op 984 B/op 23 allocs/op
+
+BenchmarkGORMUpdate/gorm-8 100000 18609 ns/op 9389 B/op 174 allocs/op
+BenchmarkGORPUpdate/gorp-8 500000 3180 ns/op 1536 B/op 35 allocs/op
+BenchmarkXORMUpdate/xorm-8 100000 13149 ns/op 5098 B/op 149 allocs/op
+BenchmarkKallaxUpdate/kallax-8 100000 22880 ns/op 11366 B/op 219 allocs/op
+BenchmarkBoilUpdate/boil-8 1000000 1810 ns/op 936 B/op 18 allocs/op
+
+BenchmarkGORMRawBind/gorm-8 20000 65821 ns/op 30502 B/op 444 allocs/op
+BenchmarkGORPRawBind/gorp-8 50000 31300 ns/op 9141 B/op 318 allocs/op
+BenchmarkXORMRawBind/xorm-8 20000 62024 ns/op 15588 B/op 403 allocs/op
+BenchmarkKallaxRawBind/kallax-8 200000 7843 ns/op 4380 B/op 46 allocs/op
+BenchmarkSQLXRawBind/sqlx-8 100000 13056 ns/op 4572 B/op 55 allocs/op
+BenchmarkBoilRawBind/boil-8 200000 11519 ns/op 4638 B/op 55 allocs/op
+```
+
+<img src="http://i.imgur.com/SltE8UQ.png"/><img src="http://i.imgur.com/lzvM5jJ.png"/><img src="http://i.imgur.com/SS0zNd2.png"/>
+
+<img src="http://i.imgur.com/Kk0IM0J.png"/><img src="http://i.imgur.com/1IFtpdP.png"/><img src="http://i.imgur.com/t6Usecx.png"/>
+
+<img src="http://i.imgur.com/98DOzcr.png"/><img src="http://i.imgur.com/NSp5r4Q.png"/><img src="http://i.imgur.com/dEGlOgI.png"/>
+
+<img src="http://i.imgur.com/W0zhuGb.png"/><img src="http://i.imgur.com/YIvDuFv.png"/><img src="http://i.imgur.com/sKwuMaU.png"/>
+
+<img src="http://i.imgur.com/ZUMYVmw.png"/><img src="http://i.imgur.com/T61rH3K.png"/><img src="http://i.imgur.com/lDr0xhY.png"/>
+
+<img src="http://i.imgur.com/LWo10M9.png"/><img src="http://i.imgur.com/Td15owT.png"/><img src="http://i.imgur.com/45XXw4K.png"/>
+
+<img src="http://i.imgur.com/lpP8qds.png"/><img src="http://i.imgur.com/hLyH3jQ.png"/><img src="http://i.imgur.com/C2v10t3.png"/>
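
One of the README additions above notes that `boil.Begin()` opens a transaction on the handle registered with `boil.SetDB()`. A minimal sketch of that flow follows; the `lib/pq` driver, the connection string, and the exact return type of `boil.Begin()` are assumptions for illustration, not taken from this diff:

```go
package main

import (
	"database/sql"

	_ "github.com/lib/pq" // assumed Postgres driver for the example
	"github.com/vattle/sqlboiler/boil"
)

func main() {
	// Open and globally register the database handle.
	db, err := sql.Open("postgres", "dbname=sqlboiler sslmode=disable")
	if err != nil {
		panic(err)
	}
	boil.SetDB(db)

	// boil.Begin() starts a transaction on the globally stored handle,
	// so the *sql.DB doesn't need to be passed around explicitly.
	tx, err := boil.Begin()
	if err != nil {
		panic(err)
	}

	// ... call generated model methods with tx as the executor ...

	if err := tx.Commit(); err != nil {
		panic(err)
	}
}
```
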
@@ -3,7 +3,7 @@ package bdb
 import (
     "strings"
 
-    "github.com/vattle/sqlboiler/strmangle"
+    "github.com/lbryio/sqlboiler/strmangle"
 )
 
 // Column holds information about a database column.
@@ -23,6 +23,17 @@ type Column struct {
     // https://www.postgresql.org/docs/9.1/static/infoschema-element-types.html
     ArrType *string
     UDTName string
+
+    // MySQL only bits
+    // Used to get full type, ex:
+    // tinyint(1) instead of tinyint
+    // Used for "tinyint-as-bool" flag
+    FullDBType string
+
+    // MS SQL only bits
+    // Used to indicate that the value
+    // for this column is auto generated by database on insert (i.e. - timestamp (old) or rowversion (new))
+    AutoGenerated bool
 }
 
 // ColumnNames of the columns.
@@ -46,6 +57,19 @@ func ColumnDBTypes(cols []Column) map[string]string {
     return types
 }
 
+// FilterColumnsByAuto generates the list of columns that have autogenerated values
+func FilterColumnsByAuto(auto bool, columns []Column) []Column {
+    var cols []Column
+
+    for _, c := range columns {
+        if (auto && c.AutoGenerated) || (!auto && !c.AutoGenerated) {
+            cols = append(cols, c)
+        }
+    }
+
+    return cols
+}
+
 // FilterColumnsByDefault generates the list of columns that have default values
 func FilterColumnsByDefault(defaults bool, columns []Column) []Column {
     var cols []Column
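
A small illustration of what the new `FilterColumnsByAuto` helper does: it splits a column list on the `AutoGenerated` flag added to `bdb.Column` above. The column values here are made up for the example:

```go
package main

import (
	"fmt"

	"github.com/lbryio/sqlboiler/bdb"
)

func main() {
	cols := []bdb.Column{
		{Name: "id"},
		{Name: "version", AutoGenerated: true}, // e.g. an MS SQL rowversion column
		{Name: "name"},
	}

	// Columns whose values the database generates on insert.
	auto := bdb.FilterColumnsByAuto(true, cols)
	// Columns the caller must supply values for.
	manual := bdb.FilterColumnsByAuto(false, cols)

	fmt.Println(len(auto), len(manual)) // 1 2
}
```
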
@@ -1,8 +1,8 @@
 package drivers
 
 import (
-    "github.com/vattle/sqlboiler/bdb"
-    "github.com/vattle/sqlboiler/strmangle"
+    "github.com/lbryio/sqlboiler/bdb"
+    "github.com/lbryio/sqlboiler/strmangle"
 )
 
 // MockDriver is a mock implementation of the bdb driver Interface
@@ -58,6 +58,14 @@ func (m *MockDriver) Columns(schema, tableName string) ([]bdb.Column, error) {
     }[tableName], nil
 }
 
+func (m *MockDriver) UniqueKeyInfo(schema, tableName string) ([]bdb.UniqueKey, error) {
+    return []bdb.UniqueKey{}, nil
+}
+
+func (m *MockDriver) AutoincrementInfo(schema, tableName string) (string, error) {
+    return "", nil
+}
+
 // ForeignKeyInfo returns a list of mock foreignkeys
 func (m *MockDriver) ForeignKeyInfo(schema, tableName string) ([]bdb.ForeignKey, error) {
     return map[string][]bdb.ForeignKey{
@@ -118,6 +126,9 @@ func (m *MockDriver) PrimaryKeyInfo(schema, tableName string) (*bdb.PrimaryKey,
 // UseLastInsertID returns a database mock LastInsertID compatibility flag
 func (m *MockDriver) UseLastInsertID() bool { return false }
 
+// UseTopClause returns a database mock SQL TOP clause compatibility flag
+func (m *MockDriver) UseTopClause() bool { return false }
+
 // Open mimics a database open call and returns nil for no error
 func (m *MockDriver) Open() error { return nil }
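
The mock driver now answers `UseTopClause()`, the capability flag the MS SQL driver below turns on. As a self-contained sketch of why such a flag exists, here is how a caller might branch on it when rendering a row limit; the interface and query builder here are local to the example and are not sqlboiler's real driver interface:

```go
package main

import "fmt"

// limiter is a stand-in for the part of a driver that reports TOP support.
type limiter interface {
	UseTopClause() bool
}

type mssqlLike struct{}

func (mssqlLike) UseTopClause() bool { return true }

type mysqlLike struct{}

func (mysqlLike) UseTopClause() bool { return false }

// limitQuery renders the limit either as "SELECT TOP n ..." or as a trailing "LIMIT n".
func limitQuery(d limiter, table string, n int) string {
	if d.UseTopClause() {
		return fmt.Sprintf("SELECT TOP %d * FROM %s", n, table)
	}
	return fmt.Sprintf("SELECT * FROM %s LIMIT %d", table, n)
}

func main() {
	fmt.Println(limitQuery(mssqlLike{}, "pilots", 5)) // SELECT TOP 5 * FROM pilots
	fmt.Println(limitQuery(mysqlLike{}, "pilots", 5)) // SELECT * FROM pilots LIMIT 5
}
```
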
382
bdb/drivers/mssql.go
Normal file
382
bdb/drivers/mssql.go
Normal file
|
@ -0,0 +1,382 @@
|
||||||
|
package drivers
|
||||||
|
|
||||||
|
import (
|
||||||
|
"database/sql"
|
||||||
|
"fmt"
|
||||||
|
"net/url"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
_ "github.com/denisenkom/go-mssqldb"
|
||||||
|
"github.com/lbryio/sqlboiler/bdb"
|
||||||
|
"github.com/pkg/errors"
|
||||||
|
)
|
||||||
|
|
||||||
|
// MSSQLDriver holds the database connection string and a handle
|
||||||
|
// to the database connection.
|
||||||
|
type MSSQLDriver struct {
|
||||||
|
connStr string
|
||||||
|
dbConn *sql.DB
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewMSSQLDriver takes the database connection details as parameters and
|
||||||
|
// returns a pointer to a MSSQLDriver object. Note that it is required to
|
||||||
|
// call MSSQLDriver.Open() and MSSQLDriver.Close() to open and close
|
||||||
|
// the database connection once an object has been obtained.
|
||||||
|
func NewMSSQLDriver(user, pass, dbname, host string, port int, sslmode string) *MSSQLDriver {
|
||||||
|
driver := MSSQLDriver{
|
||||||
|
connStr: MSSQLBuildQueryString(user, pass, dbname, host, port, sslmode),
|
||||||
|
}
|
||||||
|
|
||||||
|
return &driver
|
||||||
|
}
|
||||||
|
|
||||||
|
// MSSQLBuildQueryString builds a query string for MSSQL.
|
||||||
|
func MSSQLBuildQueryString(user, pass, dbname, host string, port int, sslmode string) string {
|
||||||
|
query := url.Values{}
|
||||||
|
query.Add("database", dbname)
|
||||||
|
query.Add("encrypt", sslmode)
|
||||||
|
|
||||||
|
u := &url.URL{
|
||||||
|
Scheme: "sqlserver",
|
||||||
|
User: url.UserPassword(user, pass),
|
||||||
|
Host: fmt.Sprintf("%s:%d", host, port),
|
||||||
|
// Path: instance, // if connecting to an instance instead of a port
|
||||||
|
RawQuery: query.Encode(),
|
||||||
|
}
|
||||||
|
|
||||||
|
return u.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Open opens the database connection using the connection string
|
||||||
|
func (m *MSSQLDriver) Open() error {
|
||||||
|
var err error
|
||||||
|
m.dbConn, err = sql.Open("mssql", m.connStr)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Close closes the database connection
|
||||||
|
func (m *MSSQLDriver) Close() {
|
||||||
|
m.dbConn.Close()
|
||||||
|
}
|
||||||
|
|
||||||
|
// UseLastInsertID returns false for mssql
|
||||||
|
func (m *MSSQLDriver) UseLastInsertID() bool {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// UseTopClause returns true to indicate MS SQL supports SQL TOP clause
|
||||||
|
func (m *MSSQLDriver) UseTopClause() bool {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// TableNames connects to the postgres database and
|
||||||
|
// retrieves all table names from the information_schema where the
|
||||||
|
// table schema is schema. It uses a whitelist and blacklist.
|
||||||
|
func (m *MSSQLDriver) TableNames(schema string, whitelist, blacklist []string) ([]string, error) {
|
||||||
|
var names []string
|
||||||
|
|
||||||
|
query := `
|
||||||
|
SELECT table_name
|
||||||
|
FROM information_schema.tables
|
||||||
|
WHERE table_schema = ? AND table_type = 'BASE TABLE'`
|
||||||
|
|
||||||
|
args := []interface{}{schema}
|
||||||
|
if len(whitelist) > 0 {
|
||||||
|
query += fmt.Sprintf(" AND table_name IN (%s);", strings.Repeat(",?", len(whitelist))[1:])
|
||||||
|
for _, w := range whitelist {
|
||||||
|
args = append(args, w)
|
||||||
|
}
|
||||||
|
} else if len(blacklist) > 0 {
|
||||||
|
query += fmt.Sprintf(" AND table_name not IN (%s);", strings.Repeat(",?", len(blacklist))[1:])
|
||||||
|
for _, b := range blacklist {
|
||||||
|
args = append(args, b)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
rows, err := m.dbConn.Query(query, args...)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
defer rows.Close()
|
||||||
|
for rows.Next() {
|
||||||
|
var name string
|
||||||
|
if err := rows.Scan(&name); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
names = append(names, name)
|
||||||
|
}
|
||||||
|
|
||||||
|
return names, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Columns takes a table name and attempts to retrieve the table information
|
||||||
|
// from the database information_schema.columns. It retrieves the column names
|
||||||
|
// and column types and returns those as a []Column after TranslateColumnType()
|
||||||
|
// converts the SQL types to Go types, for example: "varchar" to "string"
|
||||||
|
func (m *MSSQLDriver) Columns(schema, tableName string) ([]bdb.Column, error) {
|
||||||
|
var columns []bdb.Column
|
||||||
|
|
||||||
|
rows, err := m.dbConn.Query(`
|
||||||
|
SELECT column_name,
|
||||||
|
CASE
|
||||||
|
WHEN character_maximum_length IS NULL THEN data_type
|
||||||
|
ELSE data_type + '(' + CAST(character_maximum_length AS VARCHAR) + ')'
|
||||||
|
END AS full_type,
|
||||||
|
data_type,
|
||||||
|
column_default,
|
||||||
|
CASE
|
||||||
|
WHEN is_nullable = 'YES' THEN 1
|
||||||
|
ELSE 0
|
||||||
|
END AS is_nullable,
|
||||||
|
CASE
|
||||||
|
WHEN EXISTS (SELECT c.column_name
|
||||||
|
FROM information_schema.table_constraints tc
|
||||||
|
INNER JOIN information_schema.key_column_usage kcu
|
||||||
|
ON tc.constraint_name = kcu.constraint_name
|
||||||
|
AND tc.table_name = kcu.table_name
|
||||||
|
AND tc.table_schema = kcu.table_schema
|
||||||
|
WHERE c.column_name = kcu.column_name
|
||||||
|
AND tc.table_name = c.table_name
|
||||||
|
AND (tc.constraint_type = 'PRIMARY KEY' OR tc.constraint_type = 'UNIQUE')
|
||||||
|
AND (SELECT COUNT(*)
|
||||||
|
FROM information_schema.key_column_usage
|
||||||
|
WHERE table_schema = kcu.table_schema
|
||||||
|
AND table_name = tc.table_name
|
||||||
|
AND constraint_name = tc.constraint_name) = 1) THEN 1
|
||||||
|
ELSE 0
|
||||||
|
END AS is_unique,
|
||||||
|
COLUMNPROPERTY(object_id($1 + '.' + $2), c.column_name, 'IsIdentity') as is_identity
|
||||||
|
FROM information_schema.columns c
|
||||||
|
WHERE table_schema = $1 AND table_name = $2;
|
||||||
|
`, schema, tableName)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
for rows.Next() {
|
||||||
|
var colName, colType, colFullType string
|
||||||
|
var nullable, unique, identity, auto bool
|
||||||
|
var defaultValue *string
|
||||||
|
if err := rows.Scan(&colName, &colFullType, &colType, &defaultValue, &nullable, &unique, &identity); err != nil {
|
||||||
|
return nil, errors.Wrapf(err, "unable to scan for table %s", tableName)
|
||||||
|
}
|
||||||
|
|
||||||
|
auto = strings.EqualFold(colType, "timestamp") || strings.EqualFold(colType, "rowversion")
|
||||||
|
|
||||||
|
column := bdb.Column{
|
||||||
|
Name: colName,
|
||||||
|
FullDBType: colFullType,
|
||||||
|
DBType: colType,
|
||||||
|
Nullable: nullable,
|
||||||
|
Unique: unique,
|
||||||
|
AutoGenerated: auto,
|
||||||
|
}
|
||||||
|
|
||||||
|
if defaultValue != nil && *defaultValue != "NULL" {
|
||||||
|
column.Default = *defaultValue
|
||||||
|
} else if identity || auto {
|
||||||
|
column.Default = "auto"
|
||||||
|
}
|
||||||
|
columns = append(columns, column)
|
||||||
|
}
|
||||||
|
|
||||||
|
return columns, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// PrimaryKeyInfo looks up the primary key for a table.
|
||||||
|
func (m *MSSQLDriver) PrimaryKeyInfo(schema, tableName string) (*bdb.PrimaryKey, error) {
|
||||||
|
pkey := &bdb.PrimaryKey{}
|
||||||
|
var err error
|
||||||
|
|
||||||
|
query := `
|
||||||
|
SELECT constraint_name
|
||||||
|
FROM information_schema.table_constraints
|
||||||
|
WHERE table_name = ? AND constraint_type = 'PRIMARY KEY' AND table_schema = ?;`
|
||||||
|
|
||||||
|
row := m.dbConn.QueryRow(query, tableName, schema)
|
||||||
|
if err = row.Scan(&pkey.Name); err != nil {
|
||||||
|
if err == sql.ErrNoRows {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
queryColumns := `
|
||||||
|
SELECT column_name
|
||||||
|
FROM information_schema.key_column_usage
|
||||||
|
WHERE table_name = ? AND constraint_name = ? AND table_schema = ?;`
|
||||||
|
|
||||||
|
var rows *sql.Rows
|
||||||
|
if rows, err = m.dbConn.Query(queryColumns, tableName, pkey.Name, schema); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
var columns []string
|
||||||
|
for rows.Next() {
|
||||||
|
var column string
|
||||||
|
|
||||||
|
err = rows.Scan(&column)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
columns = append(columns, column)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err = rows.Err(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
pkey.Columns = columns
|
||||||
|
|
||||||
|
return pkey, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *MSSQLDriver) UniqueKeyInfo(schema, tableName string) ([]bdb.UniqueKey, error) {
|
||||||
|
return []bdb.UniqueKey{}, errors.New("not implemented")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *MSSQLDriver) AutoincrementInfo(schema, tableName string) (string, error) {
|
||||||
|
return "", errors.New("not implemented")
|
||||||
|
}
|
||||||
|
|
||||||
|
// ForeignKeyInfo retrieves the foreign keys for a given table name.
|
||||||
|
func (m *MSSQLDriver) ForeignKeyInfo(schema, tableName string) ([]bdb.ForeignKey, error) {
|
||||||
|
var fkeys []bdb.ForeignKey
|
||||||
|
|
||||||
|
query := `
|
||||||
|
SELECT ccu.constraint_name ,
|
||||||
|
ccu.table_name AS local_table ,
|
||||||
|
ccu.column_name AS local_column ,
|
||||||
|
kcu.table_name AS foreign_table ,
|
||||||
|
kcu.column_name AS foreign_column
|
||||||
|
FROM information_schema.constraint_column_usage ccu
|
||||||
|
INNER JOIN information_schema.referential_constraints rc ON ccu.constraint_name = rc.constraint_name
|
||||||
|
INNER JOIN information_schema.key_column_usage kcu ON kcu.constraint_name = rc.unique_constraint_name
|
||||||
|
WHERE ccu.table_schema = ?
|
||||||
|
AND ccu.constraint_schema = ?
|
||||||
|
AND ccu.table_name = ?
|
||||||
|
`
|
||||||
|
|
||||||
|
var rows *sql.Rows
|
||||||
|
var err error
|
||||||
|
if rows, err = m.dbConn.Query(query, schema, schema, tableName); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
for rows.Next() {
|
||||||
|
var fkey bdb.ForeignKey
|
||||||
|
var sourceTable string
|
||||||
|
|
||||||
|
fkey.Table = tableName
|
||||||
|
err = rows.Scan(&fkey.Name, &sourceTable, &fkey.Column, &fkey.ForeignTable, &fkey.ForeignColumn)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
fkeys = append(fkeys, fkey)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err = rows.Err(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return fkeys, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// TranslateColumnType converts postgres database types to Go types, for example
|
||||||
|
// "varchar" to "string" and "bigint" to "int64". It returns this parsed data
|
||||||
|
// as a Column object.
|
||||||
|
func (m *MSSQLDriver) TranslateColumnType(c bdb.Column) bdb.Column {
	if c.Nullable {
		switch c.DBType {
		case "tinyint":
			c.Type = "null.Int8"
		case "smallint":
			c.Type = "null.Int16"
		case "mediumint":
			c.Type = "null.Int32"
		case "int":
			c.Type = "null.Int"
		case "bigint":
			c.Type = "null.Int64"
		case "real":
			c.Type = "null.Float32"
		case "float":
			c.Type = "null.Float64"
		case "boolean", "bool", "bit":
			c.Type = "null.Bool"
		case "date", "datetime", "datetime2", "smalldatetime", "time":
			c.Type = "null.Time"
		case "binary", "varbinary":
			c.Type = "null.Bytes"
		case "timestamp", "rowversion":
			c.Type = "null.Bytes"
		case "xml":
			c.Type = "null.String"
		case "uniqueidentifier":
			c.Type = "null.String"
			c.DBType = "uuid"
		default:
			c.Type = "null.String"
		}
	} else {
		switch c.DBType {
		case "tinyint":
			c.Type = "int8"
		case "smallint":
			c.Type = "int16"
		case "mediumint":
			c.Type = "int32"
		case "int":
			c.Type = "int"
		case "bigint":
			c.Type = "int64"
		case "real":
			c.Type = "float32"
		case "float":
			c.Type = "float64"
		case "boolean", "bool", "bit":
			c.Type = "bool"
		case "date", "datetime", "datetime2", "smalldatetime", "time":
			c.Type = "time.Time"
		case "binary", "varbinary":
			c.Type = "[]byte"
		case "timestamp", "rowversion":
			c.Type = "[]byte"
		case "xml":
			c.Type = "string"
		case "uniqueidentifier":
			c.Type = "string"
			c.DBType = "uuid"
		default:
			c.Type = "string"
		}
	}

	return c
}

// RightQuote is the quoting character for the right side of the identifier
func (m *MSSQLDriver) RightQuote() byte {
	return ']'
}

// LeftQuote is the quoting character for the left side of the identifier
func (m *MSSQLDriver) LeftQuote() byte {
	return '['
}

// IndexPlaceholders returns true to indicate MS SQL supports indexed placeholders
func (m *MSSQLDriver) IndexPlaceholders() bool {
	return true
}

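For orientation, a minimal sketch of how the MSSQL mapping above behaves (not part of the diff; it assumes the zero value of drivers.MSSQLDriver is usable for pure type translation, since TranslateColumnType never touches the connection):

package main

import (
	"fmt"

	"github.com/lbryio/sqlboiler/bdb"
	"github.com/lbryio/sqlboiler/bdb/drivers"
)

func main() {
	d := &drivers.MSSQLDriver{}

	// A nullable uniqueidentifier column becomes null.String and its DBType
	// is normalized to "uuid" by the switch above.
	col := d.TranslateColumnType(bdb.Column{Name: "id", DBType: "uniqueidentifier", Nullable: true})
	fmt.Println(col.Type, col.DBType) // null.String uuid
}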
@@ -3,14 +3,21 @@ package drivers

 import (
 	"database/sql"
 	"fmt"
+	"sort"
 	"strconv"
 	"strings"

 	"github.com/go-sql-driver/mysql"
+	"github.com/lbryio/sqlboiler/bdb"
 	"github.com/pkg/errors"
-	"github.com/vattle/sqlboiler/bdb"
 )

+// TinyintAsBool is a global that is set from main.go if a user specifies
+// this flag when generating. This flag only applies to MySQL so we're using
+// a global instead, to avoid breaking the interface. If TinyintAsBool is true
+// then tinyint(1) will be mapped in your generated structs to bool opposed to int8.
+var TinyintAsBool bool
+
 // MySQLDriver holds the database connection string and a handle
 // to the database connection.
 type MySQLDriver struct {

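TinyintAsBool is a package-level switch rather than a driver method, so callers flip it before generation; a minimal sketch (the real flag wiring in main.go is not shown in this diff):

package main

import "github.com/lbryio/sqlboiler/bdb/drivers"

func main() {
	// Map MySQL tinyint(1) columns to bool (instead of int8) in generated code.
	// How the CLI exposes this as a flag is outside the hunks shown here.
	drivers.TinyintAsBool = true
}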
@@ -46,6 +53,7 @@ func MySQLBuildQueryString(user, pass, dbname, host string, port int, sslmode st
 	}
 	config.Addr += ":" + strconv.Itoa(port)
 	config.TLSConfig = sslmode
+	config.AllowNativePasswords = true

 	// MySQL is a bad, and by default reads date/datetime into a []byte
 	// instead of a time.Time. Tell it to stop being a bad.

@@ -75,6 +83,11 @@ func (m *MySQLDriver) UseLastInsertID() bool {
 	return true
 }

+// UseTopClause returns false to indicate MySQL doesnt support SQL TOP clause
+func (m *MySQLDriver) UseTopClause() bool {
+	return false
+}
+
 // TableNames connects to the postgres database and
 // retrieves all table names from the information_schema where the
 // table schema is public.

@@ -123,6 +136,7 @@ func (m *MySQLDriver) Columns(schema, tableName string) ([]bdb.Column, error) {
 	rows, err := m.dbConn.Query(`
 	select
 	c.column_name,
+	c.column_type,
 	if(c.data_type = 'enum', c.column_type, c.data_type),
 	if(extra = 'auto_increment','auto_increment', c.column_default),
 	c.is_nullable = 'YES',

@@ -132,7 +146,8 @@ func (m *MySQLDriver) Columns(schema, tableName string) ([]bdb.Column, error) {
 		inner join information_schema.key_column_usage kcu
 			on tc.constraint_name = kcu.constraint_name and tc.table_name = kcu.table_name and tc.table_schema = kcu.table_schema
 		where c.column_name = kcu.column_name and tc.table_name = c.table_name and
-		(tc.constraint_type = 'PRIMARY KEY' or tc.constraint_type = 'UNIQUE')
+		(tc.constraint_type = 'PRIMARY KEY' or tc.constraint_type = 'UNIQUE') and
+		(select count(*) from information_schema.key_column_usage where table_schema = kcu.table_schema and table_name = tc.table_name and constraint_name = tc.constraint_name) = 1
 	) as is_unique
 	from information_schema.columns as c
 	where table_name = ? and table_schema = ?;

@@ -144,19 +159,21 @@ func (m *MySQLDriver) Columns(schema, tableName string) ([]bdb.Column, error) {
 	defer rows.Close()

 	for rows.Next() {
-		var colName, colType string
+		var colName, colType, colFullType string
 		var nullable, unique bool
 		var defaultValue *string
-		if err := rows.Scan(&colName, &colType, &defaultValue, &nullable, &unique); err != nil {
+		if err := rows.Scan(&colName, &colFullType, &colType, &defaultValue, &nullable, &unique); err != nil {
 			return nil, errors.Wrapf(err, "unable to scan for table %s", tableName)
 		}

 		column := bdb.Column{
 			Name:       colName,
+			FullDBType: colFullType, // example: tinyint(1) instead of tinyint
 			DBType:     colType,
 			Nullable:   nullable,
 			Unique:     unique,
 		}

 		if defaultValue != nil && *defaultValue != "NULL" {
 			column.Default = *defaultValue
 		}

@@ -217,6 +234,79 @@ func (m *MySQLDriver) PrimaryKeyInfo(schema, tableName string) (*bdb.PrimaryKey,
 	return pkey, nil
 }

+// UniqueKeyInfo retrieves the unique keys for a given table name.
+func (m *MySQLDriver) UniqueKeyInfo(schema, tableName string) ([]bdb.UniqueKey, error) {
+	var ukeys []bdb.UniqueKey
+
+	query := `
+	select tc.table_name, tc.constraint_name, GROUP_CONCAT(kcu.column_name)
+	from information_schema.table_constraints tc
+	left join information_schema.key_column_usage kcu on tc.constraint_name = kcu.constraint_name and tc.table_name = kcu.table_name and tc.table_schema = kcu.table_schema
+	where tc.table_schema = ? and tc.table_name = ? and tc.constraint_type = "UNIQUE"
+	group by tc.table_name, tc.constraint_name
+	`
+
+	var rows *sql.Rows
+	var err error
+	if rows, err = m.dbConn.Query(query, schema, tableName); err != nil {
+		return nil, err
+	}
+
+	for rows.Next() {
+		var ukey bdb.UniqueKey
+		var columns string
+
+		//ukey.Table = tableName
+		err = rows.Scan(&ukey.Table, &ukey.Name, &columns)
+		if err != nil {
+			return nil, err
+		}
+
+		ukey.Columns = strings.Split(columns, ",")
+		sort.Strings(ukey.Columns)
+
+		ukeys = append(ukeys, ukey)
+	}
+
+	if err = rows.Err(); err != nil {
+		return nil, err
+	}
+
+	return ukeys, nil
+}
+
+// AutoincrementInfo retrieves the autoincrement column for a given table name, if one exists.
+func (m *MySQLDriver) AutoincrementInfo(schema, tableName string) (string, error) {
+	query := `
+	select column_name
+	from information_schema.columns
+	where table_schema = ? and table_name = ? and extra like "%auto_increment%"
+	`
+
+	var rows *sql.Rows
+	var err error
+	if rows, err = m.dbConn.Query(query, schema, tableName); err != nil {
+		return "", err
+	}
+
+	for rows.Next() {
+		var column string
+
+		err = rows.Scan(&column)
+		if err != nil {
+			return "", err
+		}
+
+		return column, nil
+	}
+
+	if err = rows.Err(); err != nil {
+		return "", err
+	}
+
+	return "", nil
+}
+
 // ForeignKeyInfo retrieves the foreign keys for a given table name.
 func (m *MySQLDriver) ForeignKeyInfo(schema, tableName string) ([]bdb.ForeignKey, error) {
 	var fkeys []bdb.ForeignKey

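To make the GROUP_CONCAT handling in UniqueKeyInfo concrete, a small self-contained sketch of the same normalization (values invented):

package main

import (
	"fmt"
	"sort"
	"strings"
)

func main() {
	// UniqueKeyInfo receives one GROUP_CONCAT(kcu.column_name) string per
	// constraint and normalizes it like this:
	columns := "tenant_id,email"
	cols := strings.Split(columns, ",")
	sort.Strings(cols)
	fmt.Println(cols) // [email tenant_id]
}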
@@ -257,18 +347,42 @@ func (m *MySQLDriver) ForeignKeyInfo(schema, tableName string) ([]bdb.ForeignKey
 // "varchar" to "string" and "bigint" to "int64". It returns this parsed data
 // as a Column object.
 func (m *MySQLDriver) TranslateColumnType(c bdb.Column) bdb.Column {
+	unsigned := strings.Contains(c.FullDBType, "unsigned")
 	if c.Nullable {
 		switch c.DBType {
 		case "tinyint":
+			// map tinyint(1) to bool if TinyintAsBool is true
+			if TinyintAsBool && c.FullDBType == "tinyint(1)" {
+				c.Type = "null.Bool"
+			} else if unsigned {
+				c.Type = "null.Uint8"
+			} else {
 				c.Type = "null.Int8"
+			}
 		case "smallint":
+			if unsigned {
+				c.Type = "null.Uint16"
+			} else {
 				c.Type = "null.Int16"
+			}
 		case "mediumint":
+			if unsigned {
+				c.Type = "null.Uint32"
+			} else {
 				c.Type = "null.Int32"
+			}
 		case "int", "integer":
+			if unsigned {
+				c.Type = "null.Uint"
+			} else {
 				c.Type = "null.Int"
+			}
 		case "bigint":
+			if unsigned {
+				c.Type = "null.Uint64"
+			} else {
 				c.Type = "null.Int64"
+			}
 		case "float":
 			c.Type = "null.Float32"
 		case "double", "double precision", "real":

@@ -287,15 +401,38 @@ func (m *MySQLDriver) TranslateColumnType(c bdb.Column) bdb.Column {
 	} else {
 		switch c.DBType {
 		case "tinyint":
+			// map tinyint(1) to bool if TinyintAsBool is true
+			if TinyintAsBool && c.FullDBType == "tinyint(1)" {
+				c.Type = "bool"
+			} else if unsigned {
+				c.Type = "uint8"
+			} else {
 				c.Type = "int8"
+			}
 		case "smallint":
+			if unsigned {
+				c.Type = "uint16"
+			} else {
 				c.Type = "int16"
+			}
 		case "mediumint":
+			if unsigned {
+				c.Type = "uint32"
+			} else {
 				c.Type = "int32"
+			}
 		case "int", "integer":
+			if unsigned {
+				c.Type = "uint"
+			} else {
 				c.Type = "int"
+			}
 		case "bigint":
+			if unsigned {
+				c.Type = "uint64"
+			} else {
 				c.Type = "int64"
+			}
 		case "float":
 			c.Type = "float32"
 		case "double", "double precision", "real":

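A self-contained sketch mirroring the new FullDBType-driven decision above (not the driver itself; values invented):

package main

import (
	"fmt"
	"strings"
)

func main() {
	// FullDBType keeps the raw MySQL column_type, e.g. "int(10) unsigned" or
	// "tinyint(1)", which is what lets the driver pick unsigned and bool types.
	tinyintAsBool := true
	fullDBType, dbType := "tinyint(1)", "tinyint"

	unsigned := strings.Contains(fullDBType, "unsigned")
	var goType string
	switch dbType {
	case "tinyint":
		if tinyintAsBool && fullDBType == "tinyint(1)" {
			goType = "bool"
		} else if unsigned {
			goType = "uint8"
		} else {
			goType = "int8"
		}
	}
	fmt.Println(goType) // bool
}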
@@ -3,14 +3,15 @@ package drivers

 import (
 	"database/sql"
 	"fmt"
+	"os"
 	"strings"

 	// Side-effect import sql driver
+
+	"github.com/lbryio/sqlboiler/bdb"
+	"github.com/lbryio/sqlboiler/strmangle"
 	_ "github.com/lib/pq"
 	"github.com/pkg/errors"
-	"github.com/vattle/sqlboiler/bdb"
-	"github.com/vattle/sqlboiler/strmangle"
 )

 // PostgresDriver holds the database connection string and a handle

@@ -78,6 +79,11 @@ func (p *PostgresDriver) UseLastInsertID() bool {
 	return false
 }

+// UseTopClause returns false to indicate PSQL doesnt support SQL TOP clause
+func (m *PostgresDriver) UseTopClause() bool {
+	return false
+}
+
 // TableNames connects to the postgres database and
 // retrieves all table names from the information_schema where the
 // table schema is schema. It uses a whitelist and blacklist.

@@ -127,7 +133,7 @@ func (p *PostgresDriver) Columns(schema, tableName string) ([]bdb.Column, error)
 	select
 	c.column_name,
 	(
-		case when c.data_type = 'USER-DEFINED' and c.udt_name <> 'hstore'
+		case when pgt.typtype = 'e'
 		then
 		(
 			select 'enum.' || c.udt_name || '(''' || string_agg(labels.label, ''',''') || ''')'

@@ -155,14 +161,15 @@ func (p *PostgresDriver) Columns(schema, tableName string) ([]bdb.Column, error)
 	c.is_nullable = 'YES' as is_nullable,
 	(select exists(
 		select 1
-		from information_schema.constraint_column_usage as ccu
-		inner join information_schema.table_constraints tc on ccu.constraint_name = tc.constraint_name
-		where ccu.table_name = c.table_name and ccu.column_name = c.column_name and tc.constraint_type = 'UNIQUE'
-	)) OR (select exists(
+		from information_schema.table_constraints tc
+		inner join information_schema.constraint_column_usage as ccu on tc.constraint_name = ccu.constraint_name
+		where tc.table_schema = $1 and tc.constraint_type = 'UNIQUE' and ccu.constraint_schema = $1 and ccu.table_name = c.table_name and ccu.column_name = c.column_name and
+		(select count(*) from information_schema.constraint_column_usage where constraint_schema = $1 and constraint_name = tc.constraint_name) = 1
+	)) OR
+	(select exists(
 		select 1
-		from
-			pg_indexes pgix
-		inner join pg_class pgc on pgix.indexname = pgc.relname and pgc.relkind = 'i'
+		from pg_indexes pgix
+		inner join pg_class pgc on pgix.indexname = pgc.relname and pgc.relkind = 'i' and pgc.relnatts = 1
 		inner join pg_index pgi on pgi.indexrelid = pgc.oid
 		inner join pg_attribute pga on pga.attrelid = pgi.indrelid and pga.attnum = ANY(pgi.indkey)
 		where

@@ -170,6 +177,8 @@ func (p *PostgresDriver) Columns(schema, tableName string) ([]bdb.Column, error)
 	)) as is_unique

 	from information_schema.columns as c
+	inner join pg_namespace as pgn on pgn.nspname = c.udt_schema
+	left join pg_type pgt on c.data_type = 'USER-DEFINED' and pgn.oid = pgt.typnamespace and c.udt_name = pgt.typname
 	left join information_schema.element_types e
 		on ((c.table_catalog, c.table_schema, c.table_name, 'TABLE', c.dtd_identifier)
 		= (e.object_catalog, e.object_schema, e.object_name, e.object_type, e.collection_type_identifier))

@@ -257,21 +266,32 @@ func (p *PostgresDriver) PrimaryKeyInfo(schema, tableName string) (*bdb.PrimaryK
 	return pkey, nil
 }

+func (p *PostgresDriver) UniqueKeyInfo(schema, tableName string) ([]bdb.UniqueKey, error) {
+	return []bdb.UniqueKey{}, errors.New("not implemented")
+}
+
+func (p *PostgresDriver) AutoincrementInfo(schema, tableName string) (string, error) {
+	return "", errors.New("not implemented")
+}
+
 // ForeignKeyInfo retrieves the foreign keys for a given table name.
 func (p *PostgresDriver) ForeignKeyInfo(schema, tableName string) ([]bdb.ForeignKey, error) {
 	var fkeys []bdb.ForeignKey

 	query := `
 	select
-		tc.constraint_name,
-		kcu.table_name as source_table,
-		kcu.column_name as source_column,
-		ccu.table_name as dest_table,
-		ccu.column_name as dest_column
-	from information_schema.table_constraints as tc
-	inner join information_schema.key_column_usage as kcu ON tc.constraint_name = kcu.constraint_name and tc.constraint_schema = kcu.constraint_schema
-	inner join information_schema.constraint_column_usage as ccu ON tc.constraint_name = ccu.constraint_name and tc.constraint_schema = ccu.constraint_schema
-	where tc.table_name = $1 and tc.constraint_type = 'FOREIGN KEY' and tc.table_schema = $2;`
+		pgcon.conname,
+		pgc.relname as source_table,
+		pgasrc.attname as source_column,
+		dstlookupname.relname as dest_table,
+		pgadst.attname as dest_column
+	from pg_namespace pgn
+	inner join pg_class pgc on pgn.oid = pgc.relnamespace and pgc.relkind = 'r'
+	inner join pg_constraint pgcon on pgn.oid = pgcon.connamespace and pgc.oid = pgcon.conrelid
+	inner join pg_class dstlookupname on pgcon.confrelid = dstlookupname.oid
+	inner join pg_attribute pgasrc on pgc.oid = pgasrc.attrelid and pgasrc.attnum = ANY(pgcon.conkey)
+	inner join pg_attribute pgadst on pgcon.confrelid = pgadst.attrelid and pgadst.attnum = ANY(pgcon.confkey)
+	where pgn.nspname = $2 and pgc.relname = $1 and pgcon.contype = 'f'`

 	var rows *sql.Rows
 	var err error

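For illustration only, this is roughly what one row of the new pg_catalog-based query assembles into; every identifier here is invented:

package main

import (
	"fmt"

	"github.com/lbryio/sqlboiler/bdb"
)

func main() {
	// conname, source table/column and destination table/column from the query
	// above end up in a bdb.ForeignKey like this one.
	fk := bdb.ForeignKey{
		Table:         "videos",
		Name:          "videos_channel_id_fkey",
		Column:        "channel_id",
		ForeignTable:  "channels",
		ForeignColumn: "id",
	}
	fmt.Printf("%s: %s.%s -> %s.%s\n", fk.Name, fk.Table, fk.Column, fk.ForeignTable, fk.ForeignColumn)
}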
@@ -340,7 +360,7 @@ func (p *PostgresDriver) TranslateColumnType(c bdb.Column) bdb.Column {
 			c.DBType = "hstore"
 		} else {
 			c.Type = "string"
-			fmt.Printf("Warning: Incompatible data type detected: %s\n", c.UDTName)
+			fmt.Fprintln(os.Stderr, "Warning: Incompatible data type detected: %s\n", c.UDTName)
 		}
 	default:
 		c.Type = "null.String"

@@ -9,6 +9,8 @@ type Interface interface {
 	TableNames(schema string, whitelist, blacklist []string) ([]string, error)
 	Columns(schema, tableName string) ([]Column, error)
 	PrimaryKeyInfo(schema, tableName string) (*PrimaryKey, error)
+	UniqueKeyInfo(schema, tableName string) ([]UniqueKey, error)
+	AutoincrementInfo(schema, tableName string) (string, error)
 	ForeignKeyInfo(schema, tableName string) ([]ForeignKey, error)

 	// TranslateColumnType takes a Database column type and returns a go column type.

@@ -18,6 +20,10 @@ type Interface interface {
 	// the sql.Exec result's LastInsertId
 	UseLastInsertID() bool

+	// UseTopClause should return true if the Database is capable of using
+	// the SQL TOP clause
+	UseTopClause() bool
+
 	// Open the database connection
 	Open() error
 	// Close the database connection

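Any custom driver now has to provide the three new methods as well; a minimal hypothetical stub (myDriver is an invented type, mirroring the testMockDriver updated later in this diff, and the rest of bdb.Interface would still be required):

package main

import "github.com/lbryio/sqlboiler/bdb"

// myDriver is an invented example type; only the methods added by this change
// are shown.
type myDriver struct{}

func (d myDriver) UniqueKeyInfo(schema, tableName string) ([]bdb.UniqueKey, error) {
	return nil, nil
}

func (d myDriver) AutoincrementInfo(schema, tableName string) (string, error) {
	return "", nil
}

func (d myDriver) UseTopClause() bool { return false }

func main() {}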
@@ -59,10 +65,18 @@ func Tables(db Interface, schema string, whitelist, blacklist []string) ([]Table
 		return nil, errors.Wrapf(err, "unable to fetch table pkey info (%s)", name)
 	}

+	if t.UKeys, err = db.UniqueKeyInfo(schema, name); err != nil {
+		return nil, errors.Wrapf(err, "unable to fetch table ukey info (%s)", name)
+	}
+
 	if t.FKeys, err = db.ForeignKeyInfo(schema, name); err != nil {
 		return nil, errors.Wrapf(err, "unable to fetch table fkey info (%s)", name)
 	}

+	if t.AutoIncrementColumn, err = db.AutoincrementInfo(schema, name); err != nil {
+		return nil, errors.Wrapf(err, "unable to fetch table autoincrement info (%s)", name)
+	}
+
 	setIsJoinTable(&t)

 	tables = append(tables, t)

|
@ -3,13 +3,14 @@ package bdb
|
||||||
import (
|
import (
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/vattle/sqlboiler/strmangle"
|
"github.com/lbryio/sqlboiler/strmangle"
|
||||||
)
|
)
|
||||||
|
|
||||||
type testMockDriver struct{}
|
type testMockDriver struct{}
|
||||||
|
|
||||||
func (m testMockDriver) TranslateColumnType(c Column) Column { return c }
|
func (m testMockDriver) TranslateColumnType(c Column) Column { return c }
|
||||||
func (m testMockDriver) UseLastInsertID() bool { return false }
|
func (m testMockDriver) UseLastInsertID() bool { return false }
|
||||||
|
func (m testMockDriver) UseTopClause() bool { return false }
|
||||||
func (m testMockDriver) Open() error { return nil }
|
func (m testMockDriver) Open() error { return nil }
|
||||||
func (m testMockDriver) Close() {}
|
func (m testMockDriver) Close() {}
|
||||||
|
|
||||||
|
|
|
@ -8,6 +8,13 @@ type PrimaryKey struct {
|
||||||
Columns []string
|
Columns []string
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// UniqueKey represents a unique key constraint in a database
|
||||||
|
type UniqueKey struct {
|
||||||
|
Table string
|
||||||
|
Name string
|
||||||
|
Columns []string
|
||||||
|
}
|
||||||
|
|
||||||
// ForeignKey represents a foreign key constraint in a database
|
// ForeignKey represents a foreign key constraint in a database
|
||||||
type ForeignKey struct {
|
type ForeignKey struct {
|
||||||
Table string
|
Table string
|
||||||
|
|
|
@ -8,9 +8,12 @@ type Table struct {
|
||||||
// For dbs with real schemas, like Postgres.
|
// For dbs with real schemas, like Postgres.
|
||||||
// Example value: "schema_name"."table_name"
|
// Example value: "schema_name"."table_name"
|
||||||
SchemaName string
|
SchemaName string
|
||||||
|
|
||||||
Columns []Column
|
Columns []Column
|
||||||
|
AutoIncrementColumn string
|
||||||
|
|
||||||
PKey *PrimaryKey
|
PKey *PrimaryKey
|
||||||
|
UKeys []UniqueKey
|
||||||
FKeys []ForeignKey
|
FKeys []ForeignKey
|
||||||
|
|
||||||
IsJoinTable bool
|
IsJoinTable bool
|
||||||
|
|
|
@@ -19,6 +19,10 @@ type Transactor interface {

 // Beginner begins transactions.
 type Beginner interface {
+	Begin() (Transactor, error)
+}
+
+type SQLBeginner interface {
 	Begin() (*sql.Tx, error)
 }

@@ -26,8 +30,12 @@ type Beginner interface {
 func Begin() (Transactor, error) {
 	creator, ok := currentDB.(Beginner)
 	if !ok {
+		creator2, ok2 := currentDB.(SQLBeginner)
+		if !ok2 {
 			panic("database does not support transactions")
 		}
+		return creator2.Begin()
+	}

 	return creator.Begin()
 }

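A hedged sketch of what the SQLBeginner fallback enables: handing boil a plain *sql.DB and still getting transactions (the DSN is invented, and the Transactor details are assumed to match upstream sqlboiler):

package main

import (
	"database/sql"
	"log"

	_ "github.com/lib/pq"

	"github.com/lbryio/sqlboiler/boil"
)

func main() {
	// *sql.DB satisfies only SQLBeginner (Begin() (*sql.Tx, error)), so the
	// fallback above is what keeps boil.Begin from panicking here.
	db, err := sql.Open("postgres", "dbname=example sslmode=disable")
	if err != nil {
		log.Fatal(err)
	}
	boil.SetDB(db)

	tx, err := boil.Begin()
	if err != nil {
		log.Fatal(err)
	}
	_ = tx // run queries on tx, then commit or roll back
}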
@@ -1,23 +0,0 @@
-package boil
-
-type boilErr struct {
-	error
-}
-
-// WrapErr wraps err in a boilErr
-func WrapErr(err error) error {
-	return boilErr{
-		error: err,
-	}
-}
-
-// Error returns the underlying error string
-func (e boilErr) Error() string {
-	return e.error.Error()
-}
-
-// IsBoilErr checks if err is a boilErr
-func IsBoilErr(err error) bool {
-	_, ok := err.(boilErr)
-	return ok
-}

@@ -1,24 +0,0 @@
-package boil
-
-import (
-	"errors"
-	"testing"
-)
-
-func TestErrors(t *testing.T) {
-	t.Parallel()
-
-	err := errors.New("test error")
-	if IsBoilErr(err) == true {
-		t.Errorf("Expected false")
-	}
-
-	err = WrapErr(errors.New("test error"))
-	if err.Error() != "test error" {
-		t.Errorf(`Expected "test error", got %v`, err.Error())
-	}
-
-	if IsBoilErr(err) != true {
-		t.Errorf("Expected true")
-	}
-}

@@ -1,6 +1,7 @@
 package boil

 import (
+	"io"
 	"os"
 	"time"
 )

@@ -20,7 +21,7 @@ var (
 var DebugMode = false

 // DebugWriter is where the debug output will be sent if DebugMode is true
-var DebugWriter = os.Stdout
+var DebugWriter io.Writer = os.Stdout

 // SetDB initializes the database handle for all template db interactions
 func SetDB(db Executor) {

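With DebugWriter typed as io.Writer, debug output can be redirected instead of always going to os.Stdout; a minimal sketch:

package main

import (
	"bytes"
	"fmt"

	"github.com/lbryio/sqlboiler/boil"
)

func main() {
	// Capture the SQL that boil would normally print to stdout in debug mode.
	var buf bytes.Buffer
	boil.DebugMode = true
	boil.DebugWriter = &buf

	// ... run queries through boil here; their debug output lands in buf ...
	fmt.Println(buf.String())
}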
@@ -1,6 +1,6 @@
-// Package sqlboiler has types and methods useful for generating code that
+// Package boilingcore has types and methods useful for generating code that
 // acts as a fully dynamic ORM might.
-package main
+package boilingcore

 import (
 	"encoding/json"

@@ -13,10 +13,10 @@ import (
 	"text/template"

 	"github.com/pkg/errors"
-	"github.com/vattle/sqlboiler/bdb"
-	"github.com/vattle/sqlboiler/bdb/drivers"
-	"github.com/vattle/sqlboiler/queries"
-	"github.com/vattle/sqlboiler/strmangle"
+	"github.com/lbryio/sqlboiler/bdb"
+	"github.com/lbryio/sqlboiler/bdb/drivers"
+	"github.com/lbryio/sqlboiler/queries"
+	"github.com/lbryio/sqlboiler/strmangle"
 )

 const (

@@ -43,6 +43,8 @@ type State struct {
 	SingletonTestTemplates *templateList

 	TestMainTemplate *template.Template
+
+	Importer importer
 }

 // New creates a new state based off of the config

@@ -89,6 +91,8 @@ func New(config *Config) (*State, error) {
 		return nil, errors.Wrap(err, "unable to initialize struct tags")
 	}

+	s.Importer = newImporter()
+
 	return s, nil
 }

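The new Importer field exposes the default import lists (defined in boilingcore/imports.go, added later in this diff); a hypothetical in-package tweak could look like this (the replacement import path is invented):

package boilingcore

// customizeImports is a hypothetical helper inside this package: swap the null
// package used by generated models for a different one.
func customizeImports(s *State) {
	s.Importer.Standard.Remove(`"github.com/lbryio/lbry.go/v2/extras/null"`)
	s.Importer.Standard.Add(`"gopkg.in/nullbio/null.v6"`, true)
}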
@@ -204,10 +208,66 @@ func (s *State) initTemplates() error {
 		}
 	}

+	return s.processReplacements()
+}
+
+// processReplacements loads any replacement templates
+func (s *State) processReplacements() error {
+	basePath, err := getBasePath(s.Config.BaseDir)
+	if err != nil {
+		return err
+	}
+
+	for _, replace := range s.Config.Replacements {
+		splits := strings.Split(replace, ":")
+		if len(splits) != 2 {
+			return errors.Errorf("replace parameters must have 2 arguments, given: %s", replace)
+		}
+
+		var toReplaceFname string
+		toReplace, replaceWith := splits[0], splits[1]
+
+		inf, err := os.Stat(filepath.Join(basePath, toReplace))
+		if err != nil {
+			return errors.Errorf("cannot stat %q", toReplace)
+		}
+		if inf.IsDir() {
+			return errors.Errorf("replace argument must be a path to a file not a dir: %q", toReplace)
+		}
+		toReplaceFname = inf.Name()
+
+		inf, err = os.Stat(replaceWith)
+		if err != nil {
+			return errors.Errorf("cannot stat %q", replaceWith)
+		}
+		if inf.IsDir() {
+			return errors.Errorf("replace argument must be a path to a file not a dir: %q", replaceWith)
+		}
+
+		switch filepath.Dir(toReplace) {
+		case templatesDirectory:
+			err = replaceTemplate(s.Templates.Template, toReplaceFname, replaceWith)
+		case templatesSingletonDirectory:
+			err = replaceTemplate(s.SingletonTemplates.Template, toReplaceFname, replaceWith)
+		case templatesTestDirectory:
+			err = replaceTemplate(s.TestTemplates.Template, toReplaceFname, replaceWith)
+		case templatesSingletonTestDirectory:
+			err = replaceTemplate(s.SingletonTestTemplates.Template, toReplaceFname, replaceWith)
+		case templatesTestMainDirectory:
+			err = replaceTemplate(s.TestMainTemplate, toReplaceFname, replaceWith)
+		default:
+			return errors.Errorf("replace file's directory not part of any known folder: %s", toReplace)
+		}
+
+		if err != nil {
+			return err
+		}
+	}
+
 	return nil
 }

-var basePackage = "github.com/vattle/sqlboiler"
+var basePackage = "github.com/lbryio/sqlboiler"

 func getBasePath(baseDirConfig string) (string, error) {
 	if len(baseDirConfig) > 0 {

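A small self-contained sketch of the replacement syntax parsed above; the paths are invented, and the format is "path of the bundled template to swap out" followed by a colon and the replacement file:

package main

import (
	"fmt"
	"strings"
)

func main() {
	replace := "templates/22_query.tpl:/home/me/custom_query.tpl"
	splits := strings.Split(replace, ":")
	fmt.Println(splits[0]) // which bundled template to swap out
	fmt.Println(splits[1]) // the file whose contents replace it
}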
@@ -245,6 +305,15 @@ func (s *State) initDriver(driverName string) error {
 			s.Config.MySQL.Port,
 			s.Config.MySQL.SSLMode,
 		)
+	case "mssql":
+		s.Driver = drivers.NewMSSQLDriver(
+			s.Config.MSSQL.User,
+			s.Config.MSSQL.Pass,
+			s.Config.MSSQL.DBName,
+			s.Config.MSSQL.Host,
+			s.Config.MSSQL.Port,
+			s.Config.MSSQL.SSLMode,
+		)
 	case "mock":
 		s.Driver = &drivers.MockDriver{}
 	}

@@ -256,6 +325,7 @@ func (s *State) initDriver(driverName string) error {
 	s.Dialect.LQ = s.Driver.LeftQuote()
 	s.Dialect.RQ = s.Driver.RightQuote()
 	s.Dialect.IndexPlaceholders = s.Driver.IndexPlaceholders()
+	s.Dialect.UseTopClause = s.Driver.UseTopClause()

 	return nil
 }

@@ -297,6 +367,12 @@ func (s *State) initTags(tags []string) error {

 // initOutFolder creates the folder that will hold the generated output.
 func (s *State) initOutFolder() error {
+	if s.Config.Wipe {
+		if err := os.RemoveAll(s.Config.OutFolder); err != nil {
+			return err
+		}
+	}
+
 	return os.MkdirAll(s.Config.OutFolder, os.ModePerm)
 }

@@ -1,4 +1,4 @@
-package main
+package boilingcore

 import (
 	"bufio"

@@ -1,4 +1,4 @@
-package main
+package boilingcore

 // Config for the running of the commands
 type Config struct {

@@ -10,13 +10,16 @@ type Config struct {
 	WhitelistTables  []string
 	BlacklistTables  []string
 	Tags             []string
+	Replacements     []string
 	Debug            bool
 	NoTests          bool
 	NoHooks          bool
 	NoAutoTimestamps bool
+	Wipe             bool

 	Postgres PostgresConfig
 	MySQL    MySQLConfig
+	MSSQL    MSSQLConfig
 }

 // PostgresConfig configures a postgres database

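A hedged sketch of filling in the new Config fields (values invented; MSSQLConfig itself is added in the next hunk, and required fields such as the driver name and output folder are omitted):

package main

import "github.com/lbryio/sqlboiler/boilingcore"

func main() {
	cfg := &boilingcore.Config{
		Wipe: true, // remove the output folder before regenerating
		MSSQL: boilingcore.MSSQLConfig{
			User:    "sa",
			Pass:    "secret",
			Host:    "localhost",
			Port:    1433,
			DBName:  "example",
			SSLMode: "disable",
		},
	}
	_ = cfg
}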
@@ -38,3 +41,13 @@ type MySQLConfig struct {
 	DBName  string
 	SSLMode string
 }
+
+// MSSQLConfig configures a mysql database
+type MSSQLConfig struct {
+	User    string
+	Pass    string
+	Host    string
+	Port    int
+	DBName  string
+	SSLMode string
+}

boilingcore/imports.go (Normal file, 466 additions)

@@ -0,0 +1,466 @@
package boilingcore

import (
	"bytes"
	"fmt"
	"sort"
	"strings"

	"github.com/lbryio/sqlboiler/bdb"
)

// imports defines the optional standard imports and
// thirdParty imports (from github for example)
type imports struct {
	standard   importList
	thirdParty importList
}

// importList is a list of import names
type importList []string

func (i importList) Len() int {
	return len(i)
}

func (i importList) Swap(k, j int) {
	i[k], i[j] = i[j], i[k]
}

func (i importList) Less(k, j int) bool {
	res := strings.Compare(strings.TrimLeft(i[k], "_ "), strings.TrimLeft(i[j], "_ "))
	if res <= 0 {
		return true
	}

	return false
}

func combineImports(a, b imports) imports {
	var c imports

	c.standard = removeDuplicates(combineStringSlices(a.standard, b.standard))
	c.thirdParty = removeDuplicates(combineStringSlices(a.thirdParty, b.thirdParty))

	sort.Sort(c.standard)
	sort.Sort(c.thirdParty)

	return c
}

func combineTypeImports(a imports, b map[string]imports, columns []bdb.Column) imports {
	tmpImp := imports{
		standard:   make(importList, len(a.standard)),
		thirdParty: make(importList, len(a.thirdParty)),
	}

	copy(tmpImp.standard, a.standard)
	copy(tmpImp.thirdParty, a.thirdParty)

	for _, col := range columns {
		for key, imp := range b {
			if col.Type == key {
				tmpImp.standard = append(tmpImp.standard, imp.standard...)
				tmpImp.thirdParty = append(tmpImp.thirdParty, imp.thirdParty...)
			}
		}
	}

	tmpImp.standard = removeDuplicates(tmpImp.standard)
	tmpImp.thirdParty = removeDuplicates(tmpImp.thirdParty)

	sort.Sort(tmpImp.standard)
	sort.Sort(tmpImp.thirdParty)

	return tmpImp
}

func buildImportString(imps imports) []byte {
	stdlen, thirdlen := len(imps.standard), len(imps.thirdParty)
	if stdlen+thirdlen < 1 {
		return []byte{}
	}

	if stdlen+thirdlen == 1 {
		var imp string
		if stdlen == 1 {
			imp = imps.standard[0]
		} else {
			imp = imps.thirdParty[0]
		}
		return []byte(fmt.Sprintf("import %s", imp))
	}

	buf := &bytes.Buffer{}
	buf.WriteString("import (")
	for _, std := range imps.standard {
		fmt.Fprintf(buf, "\n\t%s", std)
	}
	if stdlen != 0 && thirdlen != 0 {
		buf.WriteString("\n")
	}
	for _, third := range imps.thirdParty {
		fmt.Fprintf(buf, "\n\t%s", third)
	}
	buf.WriteString("\n)\n")

	return buf.Bytes()
}

func combineStringSlices(a, b []string) []string {
	c := make([]string, len(a)+len(b))
	if len(a) > 0 {
		copy(c, a)
	}
	if len(b) > 0 {
		copy(c[len(a):], b)
	}

	return c
}

func removeDuplicates(dedup []string) []string {
	if len(dedup) <= 1 {
		return dedup
	}

	for i := 0; i < len(dedup)-1; i++ {
		for j := i + 1; j < len(dedup); j++ {
			if dedup[i] != dedup[j] {
				continue
			}

			if j != len(dedup)-1 {
				dedup[j] = dedup[len(dedup)-1]
				j--
			}
			dedup = dedup[:len(dedup)-1]
		}
	}

	return dedup
}

type mapImports map[string]imports

type importer struct {
	Standard     imports
	TestStandard imports

	Singleton     mapImports
	TestSingleton mapImports

	TestMain mapImports

	BasedOnType mapImports
}

// newImporter returns an importer struct with default import values
func newImporter() importer {
	var imp importer

	imp.Standard = imports{
		standard: importList{
			`"bytes"`,
			`"database/sql"`,
			`"fmt"`,
			`"reflect"`,
			`"strings"`,
			`"sync"`,
			`"time"`,
		},
		thirdParty: importList{
			`"github.com/lbryio/lbry.go/v2/extras/errors"`,
			`"github.com/lbryio/lbry.go/v2/extras/null"`,
			`"github.com/lbryio/sqlboiler/boil"`,
			`"github.com/lbryio/sqlboiler/queries"`,
			`"github.com/lbryio/sqlboiler/queries/qm"`,
			`"github.com/lbryio/sqlboiler/strmangle"`,
		},
	}

	imp.Singleton = mapImports{
		"boil_queries": imports{
			standard: importList{
				`"fmt"`,
				`"strings"`,
			},
			thirdParty: importList{
				`"github.com/lbryio/lbry.go/v2/extras/errors"`,
				`"github.com/lbryio/sqlboiler/boil"`,
				`"github.com/lbryio/sqlboiler/queries"`,
				`"github.com/lbryio/sqlboiler/queries/qm"`,
				`"github.com/lbryio/sqlboiler/strmangle"`,
			},
		},
		"boil_types": {
			thirdParty: importList{
				`"github.com/lbryio/lbry.go/v2/extras/errors"`,
				`"github.com/lbryio/sqlboiler/strmangle"`,
			},
		},
	}

	imp.TestStandard = imports{
		standard: importList{
			`"bytes"`,
			`"reflect"`,
			`"testing"`,
		},
		thirdParty: importList{
			`"github.com/lbryio/sqlboiler/boil"`,
			`"github.com/lbryio/sqlboiler/randomize"`,
			`"github.com/lbryio/sqlboiler/strmangle"`,
		},
	}

	imp.TestSingleton = mapImports{
		"boil_main_test": {
			standard: importList{
				`"database/sql"`,
				`"flag"`,
				`"fmt"`,
				`"math/rand"`,
				`"os"`,
				`"path/filepath"`,
				`"testing"`,
				`"time"`,
			},
			thirdParty: importList{
				`"github.com/kat-co/vala"`,
				`"github.com/lbryio/lbry.go/v2/extras/errors"`,
				`"github.com/lbryio/sqlboiler/boil"`,
				`"github.com/spf13/viper"`,
			},
		},
		"boil_queries_test": {
			standard: importList{
				`"bytes"`,
				`"fmt"`,
				`"io"`,
				`"io/ioutil"`,
				`"math/rand"`,
				`"regexp"`,
			},
			thirdParty: importList{
				`"github.com/lbryio/sqlboiler/boil"`,
			},
		},
		"boil_suites_test": {
			standard: importList{
				`"testing"`,
			},
		},
	}

	imp.TestMain = mapImports{
		"postgres": {
			standard: importList{
				`"bytes"`,
				`"database/sql"`,
				`"fmt"`,
				`"io"`,
				`"io/ioutil"`,
				`"os"`,
				`"os/exec"`,
				`"strings"`,
			},
			thirdParty: importList{
				`"github.com/lbryio/lbry.go/v2/extras/errors"`,
				`"github.com/lbryio/sqlboiler/bdb/drivers"`,
				`"github.com/lbryio/sqlboiler/randomize"`,
				`_ "github.com/lib/pq"`,
				`"github.com/spf13/viper"`,
			},
		},
		"mysql": {
			standard: importList{
				`"bytes"`,
				`"database/sql"`,
				`"fmt"`,
				`"io"`,
				`"io/ioutil"`,
				`"os"`,
				`"os/exec"`,
				`"strings"`,
			},
			thirdParty: importList{
				`_ "github.com/go-sql-driver/mysql"`,
				`"github.com/lbryio/lbry.go/v2/extras/errors"`,
				`"github.com/lbryio/sqlboiler/bdb/drivers"`,
				`"github.com/lbryio/sqlboiler/randomize"`,
				`"github.com/spf13/viper"`,
			},
		},
		"mssql": {
			standard: importList{
				`"bytes"`,
				`"database/sql"`,
				`"fmt"`,
				`"os"`,
				`"os/exec"`,
				`"strings"`,
			},
			thirdParty: importList{
				`_ "github.com/denisenkom/go-mssqldb"`,
				`"github.com/lbryio/lbry.go/v2/extras/errors"`,
				`"github.com/lbryio/sqlboiler/bdb/drivers"`,
				`"github.com/lbryio/sqlboiler/randomize"`,
				`"github.com/spf13/viper"`,
			},
		},
	}

	// basedOnType imports are only included in the template output if the
	// database requires one of the following special types. Check
	// TranslateColumnType to see the type assignments.
	imp.BasedOnType = mapImports{
		"null.Float32": {
			thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
		},
		"null.Float64": {
			thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
		},
		"null.Int": {
			thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
		},
		"null.Int8": {
			thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
		},
		"null.Int16": {
			thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
		},
		"null.Int32": {
			thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
		},
		"null.Int64": {
			thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
		},
		"null.Uint": {
			thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
		},
		"null.Uint8": {
			thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
		},
		"null.Uint16": {
			thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
		},
		"null.Uint32": {
			thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
		},
		"null.Uint64": {
			thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
		},
		"null.String": {
			thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
		},
		"null.Bool": {
			thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
		},
		"null.Time": {
			thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
		},
		"null.JSON": {
			thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
		},
		"null.Bytes": {
			thirdParty: importList{`"github.com/lbryio/lbry.go/v2/extras/null"`},
		},
		"time.Time": {
			standard: importList{`"time"`},
		},
		"types.JSON": {
			thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
		},
		"types.BytesArray": {
			thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
		},
		"types.Int64Array": {
			thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
		},
		"types.Float64Array": {
			thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
		},
		"types.BoolArray": {
			thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
		},
		"types.StringArray": {
			thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
		},
		"types.Hstore": {
			thirdParty: importList{`"github.com/lbryio/sqlboiler/types"`},
		},
	}

	return imp
}

// Remove an import matching the match string under the specified key.
// Remove will search both standard and thirdParty import lists for a match.
func (m mapImports) Remove(key string, match string) {
	mp := m[key]
	for idx := 0; idx < len(mp.standard); idx++ {
		if mp.standard[idx] == match {
			mp.standard[idx] = mp.standard[len(mp.standard)-1]
			mp.standard = mp.standard[:len(mp.standard)-1]
			break
		}
	}
	for idx := 0; idx < len(mp.thirdParty); idx++ {
		if mp.thirdParty[idx] == match {
			mp.thirdParty[idx] = mp.thirdParty[len(mp.thirdParty)-1]
			mp.thirdParty = mp.thirdParty[:len(mp.thirdParty)-1]
			break
		}
	}

	// delete the key and return if both import lists are empty
	if len(mp.thirdParty) == 0 && len(mp.standard) == 0 {
		delete(m, key)
		return
	}

	m[key] = mp
}

// Add an import under the specified key. If the key does not exist, it
// will be created.
func (m mapImports) Add(key string, value string, thirdParty bool) {
	mp := m[key]
	if thirdParty {
		mp.thirdParty = append(mp.thirdParty, value)
	} else {
		mp.standard = append(mp.standard, value)
	}

	m[key] = mp
}

// Remove an import matching the match string under the specified key.
// Remove will search both standard and thirdParty import lists for a match.
func (i *imports) Remove(match string) {
	for idx := 0; idx < len(i.standard); idx++ {
		if i.standard[idx] == match {
			i.standard[idx] = i.standard[len(i.standard)-1]
			i.standard = i.standard[:len(i.standard)-1]
			break
		}
	}
	for idx := 0; idx < len(i.thirdParty); idx++ {
		if i.thirdParty[idx] == match {
			i.thirdParty[idx] = i.thirdParty[len(i.thirdParty)-1]
			i.thirdParty = i.thirdParty[:len(i.thirdParty)-1]
			break
		}
	}
}

// Add an import under the specified key. If the key does not exist, it
// will be created.
func (i *imports) Add(value string, thirdParty bool) {
	if thirdParty {
		i.thirdParty = append(i.thirdParty, value)
	} else {
		i.standard = append(i.standard, value)
	}
}

boilingcore/imports_test.go (Normal file, 395 additions)
@ -0,0 +1,395 @@
|
||||||
|
package boilingcore
|
||||||
|
|
||||||
|
import (
|
||||||
|
"reflect"
|
||||||
|
"sort"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/pkg/errors"
|
||||||
|
"github.com/lbryio/sqlboiler/bdb"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestImportsSort(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
|
||||||
|
a1 := importList{
|
||||||
|
`"fmt"`,
|
||||||
|
`"errors"`,
|
||||||
|
}
|
||||||
|
a2 := importList{
|
||||||
|
`_ "github.com/lib/pq"`,
|
||||||
|
`_ "github.com/gorilla/n"`,
|
||||||
|
`"github.com/gorilla/mux"`,
|
||||||
|
`"github.com/gorilla/websocket"`,
|
||||||
|
}
|
||||||
|
|
||||||
|
a1Expected := importList{`"errors"`, `"fmt"`}
|
||||||
|
a2Expected := importList{
|
||||||
|
`"github.com/gorilla/mux"`,
|
||||||
|
`_ "github.com/gorilla/n"`,
|
||||||
|
`"github.com/gorilla/websocket"`,
|
||||||
|
`_ "github.com/lib/pq"`,
|
||||||
|
}
|
||||||
|
|
||||||
|
sort.Sort(a1)
|
||||||
|
if !reflect.DeepEqual(a1, a1Expected) {
|
||||||
|
t.Errorf("Expected a1 to match a1Expected, got: %v", a1)
|
||||||
|
}
|
||||||
|
|
||||||
|
for i, v := range a1 {
|
||||||
|
if v != a1Expected[i] {
|
||||||
|
t.Errorf("Expected a1[%d] to match a1Expected[%d]:\n%s\n%s\n", i, i, v, a1Expected[i])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
sort.Sort(a2)
|
||||||
|
if !reflect.DeepEqual(a2, a2Expected) {
|
||||||
|
t.Errorf("Expected a2 to match a2expected, got: %v", a2)
|
||||||
|
}
|
||||||
|
|
||||||
|
for i, v := range a2 {
|
||||||
|
if v != a2Expected[i] {
|
||||||
|
t.Errorf("Expected a2[%d] to match a2Expected[%d]:\n%s\n%s\n", i, i, v, a1Expected[i])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestImportsAddAndRemove(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
|
||||||
|
var imp imports
|
||||||
|
imp.Add("value", false)
|
||||||
|
if len(imp.standard) != 1 {
|
||||||
|
t.Errorf("expected len 1, got %d", len(imp.standard))
|
||||||
|
}
|
||||||
|
if imp.standard[0] != "value" {
|
||||||
|
t.Errorf("expected %q to be added", "value")
|
||||||
|
}
|
||||||
|
imp.Add("value2", true)
|
||||||
|
if len(imp.thirdParty) != 1 {
|
||||||
|
t.Errorf("expected len 1, got %d", len(imp.thirdParty))
|
||||||
|
}
|
||||||
|
if imp.thirdParty[0] != "value2" {
|
||||||
|
t.Errorf("expected %q to be added", "value2")
|
||||||
|
}
|
||||||
|
|
||||||
|
imp.Remove("value")
|
||||||
|
if len(imp.standard) != 0 {
|
||||||
|
t.Errorf("expected len 0, got %d", len(imp.standard))
|
||||||
|
}
|
||||||
|
imp.Remove("value")
|
||||||
|
if len(imp.standard) != 0 {
|
||||||
|
t.Errorf("expected len 0, got %d", len(imp.standard))
|
||||||
|
}
|
||||||
|
imp.Remove("value2")
|
||||||
|
if len(imp.thirdParty) != 0 {
|
||||||
|
t.Errorf("expected len 0, got %d", len(imp.thirdParty))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Test deleting last element in len 2 slice
|
||||||
|
imp.Add("value3", false)
|
||||||
|
imp.Add("value4", false)
|
||||||
|
if len(imp.standard) != 2 {
|
||||||
|
t.Errorf("expected len 2, got %d", len(imp.standard))
|
||||||
|
}
|
||||||
|
imp.Remove("value4")
|
||||||
|
if len(imp.standard) != 1 {
|
||||||
|
t.Errorf("expected len 1, got %d", len(imp.standard))
|
||||||
|
}
|
||||||
|
if imp.standard[0] != "value3" {
|
||||||
|
t.Errorf("expected %q, got %q", "value3", imp.standard[0])
|
||||||
|
}
|
||||||
|
// Test deleting first element in len 2 slice
|
||||||
|
imp.Add("value4", false)
|
||||||
|
imp.Remove("value3")
|
||||||
|
if len(imp.standard) != 1 {
|
||||||
|
t.Errorf("expected len 1, got %d", len(imp.standard))
|
||||||
|
}
|
||||||
|
if imp.standard[0] != "value4" {
|
||||||
|
t.Errorf("expected %q, got %q", "value4", imp.standard[0])
|
||||||
|
}
|
||||||
|
imp.Remove("value2")
|
||||||
|
if len(imp.thirdParty) != 0 {
|
||||||
|
t.Errorf("expected len 0, got %d", len(imp.thirdParty))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Test deleting last element in len 2 slice
|
||||||
|
imp.Add("value5", true)
|
||||||
|
imp.Add("value6", true)
|
||||||
|
if len(imp.thirdParty) != 2 {
|
||||||
|
t.Errorf("expected len 2, got %d", len(imp.thirdParty))
|
||||||
|
}
|
||||||
|
imp.Remove("value6")
|
||||||
|
if len(imp.thirdParty) != 1 {
|
||||||
|
t.Errorf("expected len 1, got %d", len(imp.thirdParty))
|
||||||
|
}
|
||||||
|
if imp.thirdParty[0] != "value5" {
|
||||||
|
t.Errorf("expected %q, got %q", "value5", imp.thirdParty[0])
|
||||||
|
}
|
||||||
|
// Test deleting first element in len 2 slice
|
||||||
|
imp.Add("value6", true)
|
||||||
|
imp.Remove("value5")
|
||||||
|
if len(imp.thirdParty) != 1 {
|
||||||
|
t.Errorf("expected len 1, got %d", len(imp.thirdParty))
|
||||||
|
}
|
||||||
|
if imp.thirdParty[0] != "value6" {
|
||||||
|
t.Errorf("expected %q, got %q", "value6", imp.thirdParty[0])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestMapImportsAddAndRemove(t *testing.T) {
	t.Parallel()

	imp := mapImports{}
	imp.Add("cat", "value", false)
	if len(imp["cat"].standard) != 1 {
		t.Errorf("expected len 1, got %d", len(imp["cat"].standard))
	}
	if imp["cat"].standard[0] != "value" {
		t.Errorf("expected %q to be added", "value")
	}
	imp.Add("cat", "value2", true)
	if len(imp["cat"].thirdParty) != 1 {
		t.Errorf("expected len 1, got %d", len(imp["cat"].thirdParty))
	}
	if imp["cat"].thirdParty[0] != "value2" {
		t.Errorf("expected %q to be added", "value2")
	}

	imp.Remove("cat", "value")
	if len(imp["cat"].standard) != 0 {
		t.Errorf("expected len 0, got %d", len(imp["cat"].standard))
	}
	imp.Remove("cat", "value")
	if len(imp["cat"].standard) != 0 {
		t.Errorf("expected len 0, got %d", len(imp["cat"].standard))
	}
	imp.Remove("cat", "value2")
	if len(imp["cat"].thirdParty) != 0 {
		t.Errorf("expected len 0, got %d", len(imp["cat"].thirdParty))
	}
	// If there are no elements left in key, test key is deleted
	_, ok := imp["cat"]
	if ok {
		t.Errorf("expected cat key to be deleted when list empty")
	}

	// Test deleting last element in len 2 slice
	imp.Add("cat", "value3", false)
	imp.Add("cat", "value4", false)
	if len(imp["cat"].standard) != 2 {
		t.Errorf("expected len 2, got %d", len(imp["cat"].standard))
	}
	imp.Remove("cat", "value4")
	if len(imp["cat"].standard) != 1 {
		t.Errorf("expected len 1, got %d", len(imp["cat"].standard))
	}
	if imp["cat"].standard[0] != "value3" {
		t.Errorf("expected %q, got %q", "value3", imp["cat"].standard[0])
	}
	// Test deleting first element in len 2 slice
	imp.Add("cat", "value4", false)
	imp.Remove("cat", "value3")
	if len(imp["cat"].standard) != 1 {
		t.Errorf("expected len 1, got %d", len(imp["cat"].standard))
	}
	if imp["cat"].standard[0] != "value4" {
		t.Errorf("expected %q, got %q", "value4", imp["cat"].standard[0])
	}
	imp.Remove("cat", "value2")
	if len(imp["cat"].thirdParty) != 0 {
		t.Errorf("expected len 0, got %d", len(imp["cat"].thirdParty))
	}

	// Test deleting last element in len 2 slice
	imp.Add("dog", "value5", true)
	imp.Add("dog", "value6", true)
	if len(imp["dog"].thirdParty) != 2 {
		t.Errorf("expected len 2, got %d", len(imp["dog"].thirdParty))
	}
	imp.Remove("dog", "value6")
	if len(imp["dog"].thirdParty) != 1 {
		t.Errorf("expected len 1, got %d", len(imp["dog"].thirdParty))
	}
	if imp["dog"].thirdParty[0] != "value5" {
		t.Errorf("expected %q, got %q", "value5", imp["dog"].thirdParty[0])
	}
	// Test deleting first element in len 2 slice
	imp.Add("dog", "value6", true)
	imp.Remove("dog", "value5")
	if len(imp["dog"].thirdParty) != 1 {
		t.Errorf("expected len 1, got %d", len(imp["dog"].thirdParty))
	}
	if imp["dog"].thirdParty[0] != "value6" {
		t.Errorf("expected %q, got %q", "value6", imp["dog"].thirdParty[0])
	}
}

func TestCombineTypeImports(t *testing.T) {
	t.Parallel()

	imports1 := imports{
		standard: importList{
			`"errors"`,
			`"fmt"`,
		},
		thirdParty: importList{
			`"github.com/lbryio/sqlboiler/boil"`,
		},
	}

	importsExpected := imports{
		standard: importList{
			`"errors"`,
			`"fmt"`,
			`"time"`,
		},
		thirdParty: importList{
			`"github.com/lbryio/sqlboiler/boil"`,
			`"github.com/lbryio/lbry.go/v2/extras/null"`,
		},
	}

	cols := []bdb.Column{
		{
			Type: "null.Time",
		},
		{
			Type: "null.Time",
		},
		{
			Type: "time.Time",
		},
		{
			Type: "null.Float",
		},
	}

	imps := newImporter()

	res1 := combineTypeImports(imports1, imps.BasedOnType, cols)

	if !reflect.DeepEqual(res1, importsExpected) {
		t.Errorf("Expected res1 to match importsExpected, got:\n\n%#v\n", res1)
	}

	imports2 := imports{
		standard: importList{
			`"errors"`,
			`"fmt"`,
			`"time"`,
		},
		thirdParty: importList{
			`"github.com/lbryio/sqlboiler/boil"`,
			`"github.com/lbryio/lbry.go/v2/extras/null"`,
		},
	}

	res2 := combineTypeImports(imports2, imps.BasedOnType, cols)

	if !reflect.DeepEqual(res2, importsExpected) {
		t.Errorf("Expected res2 to match importsExpected, got:\n\n%#v\n", res2)
	}
}

func TestCombineImports(t *testing.T) {
	t.Parallel()

	a := imports{
		standard:   importList{"fmt"},
		thirdParty: importList{"github.com/lbryio/sqlboiler", "github.com/lbryio/lbry.go/v2/extras/null"},
	}
	b := imports{
		standard:   importList{"os"},
		thirdParty: importList{"github.com/lbryio/sqlboiler"},
	}

	c := combineImports(a, b)

	if c.standard[0] != "fmt" && c.standard[1] != "os" {
		t.Errorf("Wanted: fmt, os got: %#v", c.standard)
	}
	if c.thirdParty[0] != "github.com/lbryio/sqlboiler" && c.thirdParty[1] != "github.com/lbryio/lbry.go/v2/extras/null" {
		t.Errorf("Wanted: github.com/lbryio/sqlboiler, github.com/lbryio/lbry.go/v2/extras/null got: %#v", c.thirdParty)
	}
}

func TestRemoveDuplicates(t *testing.T) {
	t.Parallel()

	hasDups := func(possible []string) error {
		for i := 0; i < len(possible)-1; i++ {
			for j := i + 1; j < len(possible); j++ {
				if possible[i] == possible[j] {
					return errors.Errorf("found duplicate: %s [%d] [%d]", possible[i], i, j)
				}
			}
		}

		return nil
	}

	if len(removeDuplicates([]string{})) != 0 {
		t.Error("It should have returned an empty slice")
	}

	oneItem := []string{"patrick"}
	slice := removeDuplicates(oneItem)
	if ln := len(slice); ln != 1 {
		t.Error("Length was wrong:", ln)
	} else if oneItem[0] != slice[0] {
		t.Errorf("Slices differ: %#v %#v", oneItem, slice)
	}

	slice = removeDuplicates([]string{"hello", "patrick", "hello"})
	if ln := len(slice); ln != 2 {
		t.Error("Length was wrong:", ln)
	}
	if err := hasDups(slice); err != nil {
		t.Error(err)
	}

	slice = removeDuplicates([]string{"five", "patrick", "hello", "hello", "patrick", "hello", "hello"})
	if ln := len(slice); ln != 3 {
		t.Error("Length was wrong:", ln)
	}
	if err := hasDups(slice); err != nil {
		t.Error(err)
	}
}

func TestCombineStringSlices(t *testing.T) {
	t.Parallel()

	var a, b []string
	slice := combineStringSlices(a, b)
	if ln := len(slice); ln != 0 {
		t.Error("Len was wrong:", ln)
	}

	a = []string{"1", "2"}
	slice = combineStringSlices(a, b)
	if ln := len(slice); ln != 2 {
		t.Error("Len was wrong:", ln)
	} else if slice[0] != a[0] || slice[1] != a[1] {
		t.Errorf("Slice mismatch: %#v %#v", a, slice)
	}

	b = a
	a = nil
	slice = combineStringSlices(a, b)
	if ln := len(slice); ln != 2 {
		t.Error("Len was wrong:", ln)
	} else if slice[0] != b[0] || slice[1] != b[1] {
		t.Errorf("Slice mismatch: %#v %#v", b, slice)
	}

	a = b
	b = []string{"3", "4"}
	slice = combineStringSlices(a, b)
	if ln := len(slice); ln != 4 {
		t.Error("Len was wrong:", ln)
	} else if slice[0] != a[0] || slice[1] != a[1] || slice[2] != b[0] || slice[3] != b[1] {
		t.Errorf("Slice mismatch: %#v + %#v != %#v", a, b, slice)
	}
}

@@ -1,4 +1,4 @@
-package main
+package boilingcore

 import (
 	"bufio"
@@ -14,6 +14,12 @@ import (
 	"github.com/pkg/errors"
 )

+var noEditDisclaimer = []byte(`// This file is generated by SQLBoiler (https://github.com/lbryio/sqlboiler)
+// and is meant to be re-generated in place and/or deleted at any time.
+// DO NOT EDIT
+
+`)
+
 var (
 	// templateByteBuffer is re-used by all template construction to avoid
 	// allocating more memory than is needed. This will later be a problem for
@@ -32,7 +38,7 @@ func generateOutput(state *State, data *templateData) error {
 		state:                state,
 		data:                 data,
 		templates:            state.Templates,
-		importSet:            defaultTemplateImports,
+		importSet:            state.Importer.Standard,
 		combineImportsOnType: true,
 		fileSuffix:           ".go",
 	})
@@ -44,7 +50,7 @@ func generateTestOutput(state *State, data *templateData) error {
 		state:                state,
 		data:                 data,
 		templates:            state.TestTemplates,
-		importSet:            defaultTestTemplateImports,
+		importSet:            state.Importer.TestStandard,
 		combineImportsOnType: false,
 		fileSuffix:           "_test.go",
 	})
@@ -57,7 +63,7 @@ func generateSingletonOutput(state *State, data *templateData) error {
 		state:          state,
 		data:           data,
 		templates:      state.SingletonTemplates,
-		importNamedSet: defaultSingletonTemplateImports,
+		importNamedSet: state.Importer.Singleton,
 		fileSuffix:     ".go",
 	})
 }
@@ -69,7 +75,7 @@ func generateSingletonTestOutput(state *State, data *templateData) error {
 		state:          state,
 		data:           data,
 		templates:      state.SingletonTestTemplates,
-		importNamedSet: defaultSingletonTestTemplateImports,
+		importNamedSet: state.Importer.TestSingleton,
 		fileSuffix:     ".go",
 	})
 }
@@ -100,9 +106,10 @@ func executeTemplates(e executeTemplateData) error {
 	imps.standard = e.importSet.standard
 	imps.thirdParty = e.importSet.thirdParty
 	if e.combineImportsOnType {
-		imps = combineTypeImports(imps, importsBasedOnType, e.data.Table.Columns)
+		imps = combineTypeImports(imps, e.state.Importer.BasedOnType, e.data.Table.Columns)
 	}

+	writeFileDisclaimer(out)
 	writePackageName(out, e.state.Config.PkgName)
 	writeImports(out, imps)

@@ -138,6 +145,7 @@ func executeSingletonTemplates(e executeTemplateData) error {
 		thirdParty: e.importNamedSet[fName].thirdParty,
 	}

+	writeFileDisclaimer(out)
 	writePackageName(out, e.state.Config.PkgName)
 	writeImports(out, imps)

@@ -162,9 +170,10 @@ func generateTestMainOutput(state *State, data *templateData) error {
 	out.Reset()

 	var imps imports
-	imps.standard = defaultTestMainImports[state.Config.DriverName].standard
-	imps.thirdParty = defaultTestMainImports[state.Config.DriverName].thirdParty
+	imps.standard = state.Importer.TestMain[state.Config.DriverName].standard
+	imps.thirdParty = state.Importer.TestMain[state.Config.DriverName].thirdParty

+	writeFileDisclaimer(out)
 	writePackageName(out, state.Config.PkgName)
 	writeImports(out, imps)

@@ -179,6 +188,12 @@ func generateTestMainOutput(state *State, data *templateData) error {
 	return nil
 }

+// writeFileDisclaimer writes the disclaimer at the top with a trailing
+// newline so the package name doesn't get attached to it.
+func writeFileDisclaimer(out *bytes.Buffer) {
+	_, _ = out.Write(noEditDisclaimer)
+}
+
 // writePackageName writes the package name correctly, ignores errors
 // since it's to the concrete buffer type which produces none
 func writePackageName(out *bytes.Buffer, pkgName string) {

@@ -1,4 +1,4 @@
-package main
+package boilingcore

 import (
 	"bytes"

@@ -1,15 +1,17 @@
-package main
+package boilingcore

 import (
 	"fmt"
+	"io/ioutil"
 	"path/filepath"
 	"sort"
 	"strings"
 	"text/template"

-	"github.com/vattle/sqlboiler/bdb"
-	"github.com/vattle/sqlboiler/queries"
-	"github.com/vattle/sqlboiler/strmangle"
+	"github.com/lbryio/sqlboiler/bdb"
+	"github.com/lbryio/sqlboiler/queries"
+	"github.com/lbryio/sqlboiler/strmangle"
+	"github.com/pkg/errors"
 )

 // templateData for sqlboiler templates
@@ -109,7 +111,7 @@ func loadTemplates(dir string) (*templateList, error) {
 	return &templateList{Template: tpl}, err
 }

-// loadTemplate loads a single template file.
+// loadTemplate loads a single template file
 func loadTemplate(dir string, filename string) (*template.Template, error) {
 	pattern := filepath.Join(dir, filename)
 	tpl, err := template.New("").Funcs(templateFunctions).ParseFiles(pattern)
@@ -121,6 +123,25 @@ func loadTemplate(dir string, filename string) (*template.Template, error) {
 	return tpl.Lookup(filename), err
 }

+// replaceTemplate finds the template matching with name and replaces its
+// contents with the contents of the template located at filename
+func replaceTemplate(tpl *template.Template, name, filename string) error {
+	if tpl == nil {
+		return fmt.Errorf("template for %s is nil", name)
+	}
+
+	b, err := ioutil.ReadFile(filename)
+	if err != nil {
+		return errors.Wrapf(err, "failed reading template file: %s", filename)
+	}
+
+	if tpl, err = tpl.New(name).Funcs(templateFunctions).Parse(string(b)); err != nil {
+		return errors.Wrapf(err, "failed to parse template file: %s", filename)
+	}
+
+	return nil
+}
+
 // set is to stop duplication from named enums, allowing a template loop
 // to keep some state
 type once map[string]struct{}
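As a hedged usage sketch (not part of the diff): this is how the new replaceTemplate helper is presumably meant to be combined with loadTemplate, and it appears to be the plumbing behind the new --replace flag added in main.go further down. The directory and template file names below are invented for illustration.

    tpl, err := loadTemplate("templates", "20_struct.tpl")
    if err != nil {
        return err
    }
    // Swap the parsed template body for a user-supplied override file.
    if err := replaceTemplate(tpl, "20_struct.tpl", "overrides/20_struct.tpl"); err != nil {
        return err
    }
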
@@ -148,6 +169,7 @@ func (o once) Put(s string) bool {
 var templateStringMappers = map[string]func(string) string{
 	// String ops
 	"quoteWrap": func(a string) string { return fmt.Sprintf(`"%s"`, a) },
+	"replaceReserved": strmangle.ReplaceReservedWords,

 	// Casing
 	"titleCase": strmangle.TitleCase,
@@ -203,6 +225,7 @@ var templateFunctions = template.FuncMap{
 	"txtsFromToMany": txtsFromToMany,

 	// dbdrivers ops
+	"filterColumnsByAuto":    bdb.FilterColumnsByAuto,
 	"filterColumnsByDefault": bdb.FilterColumnsByDefault,
 	"filterColumnsByEnum":    bdb.FilterColumnsByEnum,
 	"sqlColDefinitions":      bdb.SQLColDefinitions,

@@ -1,4 +1,4 @@
-package main
+package boilingcore

 import (
 	"sort"

@@ -1,11 +1,11 @@
-package main
+package boilingcore

 import (
 	"fmt"
 	"strings"

-	"github.com/vattle/sqlboiler/bdb"
-	"github.com/vattle/sqlboiler/strmangle"
+	"github.com/lbryio/sqlboiler/bdb"
+	"github.com/lbryio/sqlboiler/strmangle"
 )

 // TxtToOne contains text that will be used by templates for a one-to-many or

@@ -1,12 +1,12 @@
-package main
+package boilingcore

 import (
 	"reflect"
 	"testing"

 	"github.com/davecgh/go-spew/spew"
-	"github.com/vattle/sqlboiler/bdb"
-	"github.com/vattle/sqlboiler/bdb/drivers"
+	"github.com/lbryio/sqlboiler/bdb"
+	"github.com/lbryio/sqlboiler/bdb/drivers"
 )

 func TestTxtsFromOne(t *testing.T) {
28  circle.yml
@@ -1,28 +0,0 @@
-test:
-  pre:
-    - mkdir -p /home/ubuntu/.go_workspace/src/github.com/jstemmer
-    - go get -u github.com/jstemmer/go-junit-report
-    - echo -e "[postgres]\nhost=\"localhost\"\nport=5432\nuser=\"ubuntu\"\ndbname=\"sqlboiler\"\n[mysql]\nhost=\"localhost\"\nport=3306\nuser=\"ubuntu\"\ndbname=\"sqlboiler\"\nsslmode=\"false\"" > sqlboiler.toml
-    - createdb -U ubuntu sqlboiler
-    - psql -U ubuntu sqlboiler < ./testdata/postgres_test_schema.sql
-    - echo "create database sqlboiler;" | mysql -u ubuntu
-    - mysql -u ubuntu sqlboiler < ./testdata/mysql_test_schema.sql
-    - ./sqlboiler postgres -o "postgres"
-    - ./sqlboiler postgres -o "mysql"
-  override:
-    - go test -v -race ./... > $CIRCLE_ARTIFACTS/gotest.txt
-  post:
-    - cat $CIRCLE_ARTIFACTS/gotest.txt | go-junit-report > $CIRCLE_TEST_REPORTS/junit.xml
-
-machine:
-  environment:
-    GODIST: "go1.7.linux-amd64.tar.gz"
-  post:
-    - mkdir -p download
-    - test -e download/$GODIST || curl -o download/$GODIST https://storage.googleapis.com/golang/$GODIST
-    - sudo rm -rf /usr/local/go
-    - sudo tar -C /usr/local -xzf download/$GODIST
-
-dependencies:
-  cache_directories:
-    - ~/download
|
348  imports.go
|
@ -1,348 +0,0 @@
|
||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"fmt"
|
|
||||||
"sort"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/vattle/sqlboiler/bdb"
|
|
||||||
)
|
|
||||||
|
|
||||||
// imports defines the optional standard imports and
|
|
||||||
// thirdParty imports (from github for example)
|
|
||||||
type imports struct {
|
|
||||||
standard importList
|
|
||||||
thirdParty importList
|
|
||||||
}
|
|
||||||
|
|
||||||
// importList is a list of import names
|
|
||||||
type importList []string
|
|
||||||
|
|
||||||
func (i importList) Len() int {
|
|
||||||
return len(i)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (i importList) Swap(k, j int) {
|
|
||||||
i[k], i[j] = i[j], i[k]
|
|
||||||
}
|
|
||||||
|
|
||||||
func (i importList) Less(k, j int) bool {
|
|
||||||
res := strings.Compare(strings.TrimLeft(i[k], "_ "), strings.TrimLeft(i[j], "_ "))
|
|
||||||
if res <= 0 {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
func combineImports(a, b imports) imports {
|
|
||||||
var c imports
|
|
||||||
|
|
||||||
c.standard = removeDuplicates(combineStringSlices(a.standard, b.standard))
|
|
||||||
c.thirdParty = removeDuplicates(combineStringSlices(a.thirdParty, b.thirdParty))
|
|
||||||
|
|
||||||
sort.Sort(c.standard)
|
|
||||||
sort.Sort(c.thirdParty)
|
|
||||||
|
|
||||||
return c
|
|
||||||
}
|
|
||||||
|
|
||||||
func combineTypeImports(a imports, b map[string]imports, columns []bdb.Column) imports {
|
|
||||||
tmpImp := imports{
|
|
||||||
standard: make(importList, len(a.standard)),
|
|
||||||
thirdParty: make(importList, len(a.thirdParty)),
|
|
||||||
}
|
|
||||||
|
|
||||||
copy(tmpImp.standard, a.standard)
|
|
||||||
copy(tmpImp.thirdParty, a.thirdParty)
|
|
||||||
|
|
||||||
for _, col := range columns {
|
|
||||||
for key, imp := range b {
|
|
||||||
if col.Type == key {
|
|
||||||
tmpImp.standard = append(tmpImp.standard, imp.standard...)
|
|
||||||
tmpImp.thirdParty = append(tmpImp.thirdParty, imp.thirdParty...)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
tmpImp.standard = removeDuplicates(tmpImp.standard)
|
|
||||||
tmpImp.thirdParty = removeDuplicates(tmpImp.thirdParty)
|
|
||||||
|
|
||||||
sort.Sort(tmpImp.standard)
|
|
||||||
sort.Sort(tmpImp.thirdParty)
|
|
||||||
|
|
||||||
return tmpImp
|
|
||||||
}
|
|
||||||
|
|
||||||
func buildImportString(imps imports) []byte {
|
|
||||||
stdlen, thirdlen := len(imps.standard), len(imps.thirdParty)
|
|
||||||
if stdlen+thirdlen < 1 {
|
|
||||||
return []byte{}
|
|
||||||
}
|
|
||||||
|
|
||||||
if stdlen+thirdlen == 1 {
|
|
||||||
var imp string
|
|
||||||
if stdlen == 1 {
|
|
||||||
imp = imps.standard[0]
|
|
||||||
} else {
|
|
||||||
imp = imps.thirdParty[0]
|
|
||||||
}
|
|
||||||
return []byte(fmt.Sprintf("import %s", imp))
|
|
||||||
}
|
|
||||||
|
|
||||||
buf := &bytes.Buffer{}
|
|
||||||
buf.WriteString("import (")
|
|
||||||
for _, std := range imps.standard {
|
|
||||||
fmt.Fprintf(buf, "\n\t%s", std)
|
|
||||||
}
|
|
||||||
if stdlen != 0 && thirdlen != 0 {
|
|
||||||
buf.WriteString("\n")
|
|
||||||
}
|
|
||||||
for _, third := range imps.thirdParty {
|
|
||||||
fmt.Fprintf(buf, "\n\t%s", third)
|
|
||||||
}
|
|
||||||
buf.WriteString("\n)\n")
|
|
||||||
|
|
||||||
return buf.Bytes()
|
|
||||||
}
|
|
||||||
|
|
||||||
func combineStringSlices(a, b []string) []string {
|
|
||||||
c := make([]string, len(a)+len(b))
|
|
||||||
if len(a) > 0 {
|
|
||||||
copy(c, a)
|
|
||||||
}
|
|
||||||
if len(b) > 0 {
|
|
||||||
copy(c[len(a):], b)
|
|
||||||
}
|
|
||||||
|
|
||||||
return c
|
|
||||||
}
|
|
||||||
|
|
||||||
func removeDuplicates(dedup []string) []string {
|
|
||||||
if len(dedup) <= 1 {
|
|
||||||
return dedup
|
|
||||||
}
|
|
||||||
|
|
||||||
for i := 0; i < len(dedup)-1; i++ {
|
|
||||||
for j := i + 1; j < len(dedup); j++ {
|
|
||||||
if dedup[i] != dedup[j] {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if j != len(dedup)-1 {
|
|
||||||
dedup[j] = dedup[len(dedup)-1]
|
|
||||||
j--
|
|
||||||
}
|
|
||||||
dedup = dedup[:len(dedup)-1]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return dedup
|
|
||||||
}
|
|
||||||
|
|
||||||
var defaultTemplateImports = imports{
|
|
||||||
standard: importList{
|
|
||||||
`"bytes"`,
|
|
||||||
`"database/sql"`,
|
|
||||||
`"fmt"`,
|
|
||||||
`"reflect"`,
|
|
||||||
`"strings"`,
|
|
||||||
`"sync"`,
|
|
||||||
`"time"`,
|
|
||||||
},
|
|
||||||
thirdParty: importList{
|
|
||||||
`"github.com/pkg/errors"`,
|
|
||||||
`"github.com/vattle/sqlboiler/boil"`,
|
|
||||||
`"github.com/vattle/sqlboiler/queries"`,
|
|
||||||
`"github.com/vattle/sqlboiler/queries/qm"`,
|
|
||||||
`"github.com/vattle/sqlboiler/strmangle"`,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
var defaultSingletonTemplateImports = map[string]imports{
|
|
||||||
"boil_queries": {
|
|
||||||
thirdParty: importList{
|
|
||||||
`"github.com/vattle/sqlboiler/boil"`,
|
|
||||||
`"github.com/vattle/sqlboiler/queries"`,
|
|
||||||
`"github.com/vattle/sqlboiler/queries/qm"`,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"boil_types": {
|
|
||||||
thirdParty: importList{
|
|
||||||
`"github.com/pkg/errors"`,
|
|
||||||
`"github.com/vattle/sqlboiler/strmangle"`,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
var defaultTestTemplateImports = imports{
|
|
||||||
standard: importList{
|
|
||||||
`"bytes"`,
|
|
||||||
`"reflect"`,
|
|
||||||
`"testing"`,
|
|
||||||
},
|
|
||||||
thirdParty: importList{
|
|
||||||
`"github.com/vattle/sqlboiler/boil"`,
|
|
||||||
`"github.com/vattle/sqlboiler/randomize"`,
|
|
||||||
`"github.com/vattle/sqlboiler/strmangle"`,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
var defaultSingletonTestTemplateImports = map[string]imports{
|
|
||||||
"boil_main_test": {
|
|
||||||
standard: importList{
|
|
||||||
`"database/sql"`,
|
|
||||||
`"flag"`,
|
|
||||||
`"fmt"`,
|
|
||||||
`"math/rand"`,
|
|
||||||
`"os"`,
|
|
||||||
`"path/filepath"`,
|
|
||||||
`"testing"`,
|
|
||||||
`"time"`,
|
|
||||||
},
|
|
||||||
thirdParty: importList{
|
|
||||||
`"github.com/kat-co/vala"`,
|
|
||||||
`"github.com/pkg/errors"`,
|
|
||||||
`"github.com/spf13/viper"`,
|
|
||||||
`"github.com/vattle/sqlboiler/boil"`,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"boil_queries_test": {
|
|
||||||
standard: importList{
|
|
||||||
`"bytes"`,
|
|
||||||
`"fmt"`,
|
|
||||||
`"io"`,
|
|
||||||
`"io/ioutil"`,
|
|
||||||
`"math/rand"`,
|
|
||||||
`"regexp"`,
|
|
||||||
},
|
|
||||||
thirdParty: importList{
|
|
||||||
`"github.com/vattle/sqlboiler/boil"`,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"boil_suites_test": {
|
|
||||||
standard: importList{
|
|
||||||
`"testing"`,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
var defaultTestMainImports = map[string]imports{
|
|
||||||
"postgres": {
|
|
||||||
standard: importList{
|
|
||||||
`"bytes"`,
|
|
||||||
`"database/sql"`,
|
|
||||||
`"fmt"`,
|
|
||||||
`"io"`,
|
|
||||||
`"io/ioutil"`,
|
|
||||||
`"os"`,
|
|
||||||
`"os/exec"`,
|
|
||||||
`"strings"`,
|
|
||||||
},
|
|
||||||
thirdParty: importList{
|
|
||||||
`"github.com/pkg/errors"`,
|
|
||||||
`"github.com/spf13/viper"`,
|
|
||||||
`"github.com/vattle/sqlboiler/bdb/drivers"`,
|
|
||||||
`"github.com/vattle/sqlboiler/randomize"`,
|
|
||||||
`_ "github.com/lib/pq"`,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"mysql": {
|
|
||||||
standard: importList{
|
|
||||||
`"bytes"`,
|
|
||||||
`"database/sql"`,
|
|
||||||
`"fmt"`,
|
|
||||||
`"io"`,
|
|
||||||
`"io/ioutil"`,
|
|
||||||
`"os"`,
|
|
||||||
`"os/exec"`,
|
|
||||||
`"strings"`,
|
|
||||||
},
|
|
||||||
thirdParty: importList{
|
|
||||||
`"github.com/pkg/errors"`,
|
|
||||||
`"github.com/spf13/viper"`,
|
|
||||||
`"github.com/vattle/sqlboiler/bdb/drivers"`,
|
|
||||||
`"github.com/vattle/sqlboiler/randomize"`,
|
|
||||||
`_ "github.com/go-sql-driver/mysql"`,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
// importsBasedOnType imports are only included in the template output if the
|
|
||||||
// database requires one of the following special types. Check
|
|
||||||
// TranslateColumnType to see the type assignments.
|
|
||||||
var importsBasedOnType = map[string]imports{
|
|
||||||
"null.Float32": {
|
|
||||||
thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
|
|
||||||
},
|
|
||||||
"null.Float64": {
|
|
||||||
thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
|
|
||||||
},
|
|
||||||
"null.Int": {
|
|
||||||
thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
|
|
||||||
},
|
|
||||||
"null.Int8": {
|
|
||||||
thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
|
|
||||||
},
|
|
||||||
"null.Int16": {
|
|
||||||
thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
|
|
||||||
},
|
|
||||||
"null.Int32": {
|
|
||||||
thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
|
|
||||||
},
|
|
||||||
"null.Int64": {
|
|
||||||
thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
|
|
||||||
},
|
|
||||||
"null.Uint": {
|
|
||||||
thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
|
|
||||||
},
|
|
||||||
"null.Uint8": {
|
|
||||||
thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
|
|
||||||
},
|
|
||||||
"null.Uint16": {
|
|
||||||
thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
|
|
||||||
},
|
|
||||||
"null.Uint32": {
|
|
||||||
thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
|
|
||||||
},
|
|
||||||
"null.Uint64": {
|
|
||||||
thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
|
|
||||||
},
|
|
||||||
"null.String": {
|
|
||||||
thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
|
|
||||||
},
|
|
||||||
"null.Bool": {
|
|
||||||
thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
|
|
||||||
},
|
|
||||||
"null.Time": {
|
|
||||||
thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
|
|
||||||
},
|
|
||||||
"null.JSON": {
|
|
||||||
thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
|
|
||||||
},
|
|
||||||
"null.Bytes": {
|
|
||||||
thirdParty: importList{`"gopkg.in/nullbio/null.v6"`},
|
|
||||||
},
|
|
||||||
"time.Time": {
|
|
||||||
standard: importList{`"time"`},
|
|
||||||
},
|
|
||||||
"types.JSON": {
|
|
||||||
thirdParty: importList{`"github.com/vattle/sqlboiler/types"`},
|
|
||||||
},
|
|
||||||
"types.BytesArray": {
|
|
||||||
thirdParty: importList{`"github.com/vattle/sqlboiler/types"`},
|
|
||||||
},
|
|
||||||
"types.Int64Array": {
|
|
||||||
thirdParty: importList{`"github.com/vattle/sqlboiler/types"`},
|
|
||||||
},
|
|
||||||
"types.Float64Array": {
|
|
||||||
thirdParty: importList{`"github.com/vattle/sqlboiler/types"`},
|
|
||||||
},
|
|
||||||
"types.BoolArray": {
|
|
||||||
thirdParty: importList{`"github.com/vattle/sqlboiler/types"`},
|
|
||||||
},
|
|
||||||
"types.Hstore": {
|
|
||||||
thirdParty: importList{`"github.com/vattle/sqlboiler/types"`},
|
|
||||||
},
|
|
||||||
}
|
|
222  imports_test.go
|
@ -1,222 +0,0 @@
|
||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"reflect"
|
|
||||||
"sort"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"github.com/pkg/errors"
|
|
||||||
"github.com/vattle/sqlboiler/bdb"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestImportsSort(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
a1 := importList{
|
|
||||||
`"fmt"`,
|
|
||||||
`"errors"`,
|
|
||||||
}
|
|
||||||
a2 := importList{
|
|
||||||
`_ "github.com/lib/pq"`,
|
|
||||||
`_ "github.com/gorilla/n"`,
|
|
||||||
`"github.com/gorilla/mux"`,
|
|
||||||
`"github.com/gorilla/websocket"`,
|
|
||||||
}
|
|
||||||
|
|
||||||
a1Expected := importList{`"errors"`, `"fmt"`}
|
|
||||||
a2Expected := importList{
|
|
||||||
`"github.com/gorilla/mux"`,
|
|
||||||
`_ "github.com/gorilla/n"`,
|
|
||||||
`"github.com/gorilla/websocket"`,
|
|
||||||
`_ "github.com/lib/pq"`,
|
|
||||||
}
|
|
||||||
|
|
||||||
sort.Sort(a1)
|
|
||||||
if !reflect.DeepEqual(a1, a1Expected) {
|
|
||||||
t.Errorf("Expected a1 to match a1Expected, got: %v", a1)
|
|
||||||
}
|
|
||||||
|
|
||||||
for i, v := range a1 {
|
|
||||||
if v != a1Expected[i] {
|
|
||||||
t.Errorf("Expected a1[%d] to match a1Expected[%d]:\n%s\n%s\n", i, i, v, a1Expected[i])
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
sort.Sort(a2)
|
|
||||||
if !reflect.DeepEqual(a2, a2Expected) {
|
|
||||||
t.Errorf("Expected a2 to match a2expected, got: %v", a2)
|
|
||||||
}
|
|
||||||
|
|
||||||
for i, v := range a2 {
|
|
||||||
if v != a2Expected[i] {
|
|
||||||
t.Errorf("Expected a2[%d] to match a2Expected[%d]:\n%s\n%s\n", i, i, v, a1Expected[i])
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestCombineTypeImports(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
imports1 := imports{
|
|
||||||
standard: importList{
|
|
||||||
`"errors"`,
|
|
||||||
`"fmt"`,
|
|
||||||
},
|
|
||||||
thirdParty: importList{
|
|
||||||
`"github.com/vattle/sqlboiler/boil"`,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
importsExpected := imports{
|
|
||||||
standard: importList{
|
|
||||||
`"errors"`,
|
|
||||||
`"fmt"`,
|
|
||||||
`"time"`,
|
|
||||||
},
|
|
||||||
thirdParty: importList{
|
|
||||||
`"github.com/vattle/sqlboiler/boil"`,
|
|
||||||
`"gopkg.in/nullbio/null.v6"`,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
cols := []bdb.Column{
|
|
||||||
{
|
|
||||||
Type: "null.Time",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Type: "null.Time",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Type: "time.Time",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Type: "null.Float",
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
res1 := combineTypeImports(imports1, importsBasedOnType, cols)
|
|
||||||
|
|
||||||
if !reflect.DeepEqual(res1, importsExpected) {
|
|
||||||
t.Errorf("Expected res1 to match importsExpected, got:\n\n%#v\n", res1)
|
|
||||||
}
|
|
||||||
|
|
||||||
imports2 := imports{
|
|
||||||
standard: importList{
|
|
||||||
`"errors"`,
|
|
||||||
`"fmt"`,
|
|
||||||
`"time"`,
|
|
||||||
},
|
|
||||||
thirdParty: importList{
|
|
||||||
`"github.com/vattle/sqlboiler/boil"`,
|
|
||||||
`"gopkg.in/nullbio/null.v6"`,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
res2 := combineTypeImports(imports2, importsBasedOnType, cols)
|
|
||||||
|
|
||||||
if !reflect.DeepEqual(res2, importsExpected) {
|
|
||||||
t.Errorf("Expected res2 to match importsExpected, got:\n\n%#v\n", res1)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestCombineImports(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
a := imports{
|
|
||||||
standard: importList{"fmt"},
|
|
||||||
thirdParty: importList{"github.com/vattle/sqlboiler", "gopkg.in/nullbio/null.v6"},
|
|
||||||
}
|
|
||||||
b := imports{
|
|
||||||
standard: importList{"os"},
|
|
||||||
thirdParty: importList{"github.com/vattle/sqlboiler"},
|
|
||||||
}
|
|
||||||
|
|
||||||
c := combineImports(a, b)
|
|
||||||
|
|
||||||
if c.standard[0] != "fmt" && c.standard[1] != "os" {
|
|
||||||
t.Errorf("Wanted: fmt, os got: %#v", c.standard)
|
|
||||||
}
|
|
||||||
if c.thirdParty[0] != "github.com/vattle/sqlboiler" && c.thirdParty[1] != "gopkg.in/nullbio/null.v6" {
|
|
||||||
t.Errorf("Wanted: github.com/vattle/sqlboiler, gopkg.in/nullbio/null.v6 got: %#v", c.thirdParty)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestRemoveDuplicates(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
hasDups := func(possible []string) error {
|
|
||||||
for i := 0; i < len(possible)-1; i++ {
|
|
||||||
for j := i + 1; j < len(possible); j++ {
|
|
||||||
if possible[i] == possible[j] {
|
|
||||||
return errors.Errorf("found duplicate: %s [%d] [%d]", possible[i], i, j)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(removeDuplicates([]string{})) != 0 {
|
|
||||||
t.Error("It should have returned an empty slice")
|
|
||||||
}
|
|
||||||
|
|
||||||
oneItem := []string{"patrick"}
|
|
||||||
slice := removeDuplicates(oneItem)
|
|
||||||
if ln := len(slice); ln != 1 {
|
|
||||||
t.Error("Length was wrong:", ln)
|
|
||||||
} else if oneItem[0] != slice[0] {
|
|
||||||
t.Errorf("Slices differ: %#v %#v", oneItem, slice)
|
|
||||||
}
|
|
||||||
|
|
||||||
slice = removeDuplicates([]string{"hello", "patrick", "hello"})
|
|
||||||
if ln := len(slice); ln != 2 {
|
|
||||||
t.Error("Length was wrong:", ln)
|
|
||||||
}
|
|
||||||
if err := hasDups(slice); err != nil {
|
|
||||||
t.Error(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
slice = removeDuplicates([]string{"five", "patrick", "hello", "hello", "patrick", "hello", "hello"})
|
|
||||||
if ln := len(slice); ln != 3 {
|
|
||||||
t.Error("Length was wrong:", ln)
|
|
||||||
}
|
|
||||||
if err := hasDups(slice); err != nil {
|
|
||||||
t.Error(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestCombineStringSlices(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
var a, b []string
|
|
||||||
slice := combineStringSlices(a, b)
|
|
||||||
if ln := len(slice); ln != 0 {
|
|
||||||
t.Error("Len was wrong:", ln)
|
|
||||||
}
|
|
||||||
|
|
||||||
a = []string{"1", "2"}
|
|
||||||
slice = combineStringSlices(a, b)
|
|
||||||
if ln := len(slice); ln != 2 {
|
|
||||||
t.Error("Len was wrong:", ln)
|
|
||||||
} else if slice[0] != a[0] || slice[1] != a[1] {
|
|
||||||
t.Errorf("Slice mismatch: %#v %#v", a, slice)
|
|
||||||
}
|
|
||||||
|
|
||||||
b = a
|
|
||||||
a = nil
|
|
||||||
slice = combineStringSlices(a, b)
|
|
||||||
if ln := len(slice); ln != 2 {
|
|
||||||
t.Error("Len was wrong:", ln)
|
|
||||||
} else if slice[0] != b[0] || slice[1] != b[1] {
|
|
||||||
t.Errorf("Slice mismatch: %#v %#v", b, slice)
|
|
||||||
}
|
|
||||||
|
|
||||||
a = b
|
|
||||||
b = []string{"3", "4"}
|
|
||||||
slice = combineStringSlices(a, b)
|
|
||||||
if ln := len(slice); ln != 4 {
|
|
||||||
t.Error("Len was wrong:", ln)
|
|
||||||
} else if slice[0] != a[0] || slice[1] != a[1] || slice[2] != b[0] || slice[3] != b[1] {
|
|
||||||
t.Errorf("Slice mismatch: %#v + %#v != #%v", a, b, slice)
|
|
||||||
}
|
|
||||||
}
|
|
90  main.go
@@ -8,15 +8,17 @@ import (
 	"strings"

 	"github.com/kat-co/vala"
+	"github.com/lbryio/sqlboiler/bdb/drivers"
+	"github.com/lbryio/sqlboiler/boilingcore"
 	"github.com/spf13/cobra"
 	"github.com/spf13/viper"
 )

-const sqlBoilerVersion = "2.1.1"
+const sqlBoilerVersion = "2.4.0+lbry"

 var (
-	cmdState  *State
-	cmdConfig *Config
+	cmdState  *boilingcore.State
+	cmdConfig *boilingcore.Config
 )

 func main() {
@@ -60,7 +62,7 @@ func main() {
 		Use:   "sqlboiler [flags] <driver>",
 		Short: "SQL Boiler generates an ORM tailored to your database schema.",
 		Long: "SQL Boiler generates a Go ORM from template files, tailored to your database schema.\n" +
-			`Complete documentation is available at http://github.com/vattle/sqlboiler`,
+			`Complete documentation is available at http://github.com/lbryio/sqlboiler`,
 		Example: `sqlboiler postgres`,
 		PreRunE: preRun,
 		RunE:    run,
@@ -71,22 +73,30 @@ func main() {

 	// Set up the cobra root command flags
 	rootCmd.PersistentFlags().StringP("output", "o", "models", "The name of the folder to output to")
-	rootCmd.PersistentFlags().StringP("schema", "s", "public", "The name of your database schema, for databases that support real schemas")
+	rootCmd.PersistentFlags().StringP("schema", "s", "", "schema name for drivers that support it (default psql: public, mssql: dbo)")
 	rootCmd.PersistentFlags().StringP("pkgname", "p", "models", "The name you wish to assign to your generated package")
 	rootCmd.PersistentFlags().StringP("basedir", "", "", "The base directory has the templates and templates_test folders")
 	rootCmd.PersistentFlags().StringSliceP("blacklist", "b", nil, "Do not include these tables in your generated package")
 	rootCmd.PersistentFlags().StringSliceP("whitelist", "w", nil, "Only include these tables in your generated package")
 	rootCmd.PersistentFlags().StringSliceP("tag", "t", nil, "Struct tags to be included on your models in addition to json, yaml, toml")
+	rootCmd.PersistentFlags().StringSliceP("replace", "", nil, "Replace templates by directory: relpath/to_file.tpl:relpath/to_replacement.tpl")
 	rootCmd.PersistentFlags().BoolP("debug", "d", false, "Debug mode prints stack traces on error")
 	rootCmd.PersistentFlags().BoolP("no-tests", "", false, "Disable generated go test files")
 	rootCmd.PersistentFlags().BoolP("no-hooks", "", false, "Disable hooks feature for your models")
 	rootCmd.PersistentFlags().BoolP("no-auto-timestamps", "", false, "Disable automatic timestamps for created_at/updated_at")
 	rootCmd.PersistentFlags().BoolP("version", "", false, "Print the version")
+	rootCmd.PersistentFlags().BoolP("tinyint-as-bool", "", false, "Map MySQL tinyint(1) in Go to bool instead of int8")
+	rootCmd.PersistentFlags().BoolP("wipe", "", false, "Delete the output folder (rm -rf) before generation to ensure sanity")
+
+	// hide flags not recommended for use
+	rootCmd.PersistentFlags().MarkHidden("replace")

 	viper.SetDefault("postgres.sslmode", "require")
 	viper.SetDefault("postgres.port", "5432")
 	viper.SetDefault("mysql.sslmode", "true")
 	viper.SetDefault("mysql.port", "3306")
+	viper.SetDefault("mssql.sslmode", "true")
+	viper.SetDefault("mssql.port", "1433")

 	viper.BindPFlags(rootCmd.PersistentFlags())
 	viper.AutomaticEnv()
@@ -120,7 +130,7 @@ func preRun(cmd *cobra.Command, args []string) error {

 	driverName := args[0]

-	cmdConfig = &Config{
+	cmdConfig = &boilingcore.Config{
 		DriverName: driverName,
 		OutFolder:  viper.GetString("output"),
 		Schema:     viper.GetString("schema"),
@@ -130,6 +140,7 @@ func preRun(cmd *cobra.Command, args []string) error {
 		NoTests:          viper.GetBool("no-tests"),
 		NoHooks:          viper.GetBool("no-hooks"),
 		NoAutoTimestamps: viper.GetBool("no-auto-timestamps"),
+		Wipe:             viper.GetBool("wipe"),
 	}

 	// BUG: https://github.com/spf13/viper/issues/200
@@ -137,7 +148,7 @@ func preRun(cmd *cobra.Command, args []string) error {
 	// detect a malformed value coming out of viper.
 	// Once the bug is fixed we'll be able to move this into the init above
 	cmdConfig.BlacklistTables = viper.GetStringSlice("blacklist")
-	if len(cmdConfig.BlacklistTables) == 1 && strings.HasPrefix(cmdConfig.BlacklistTables[0], "[") {
+	if len(cmdConfig.BlacklistTables) == 1 && strings.ContainsRune(cmdConfig.BlacklistTables[0], ',') {
 		cmdConfig.BlacklistTables, err = cmd.PersistentFlags().GetStringSlice("blacklist")
 		if err != nil {
 			return err
@@ -145,7 +156,7 @@ func preRun(cmd *cobra.Command, args []string) error {
 	}

 	cmdConfig.WhitelistTables = viper.GetStringSlice("whitelist")
-	if len(cmdConfig.WhitelistTables) == 1 && strings.HasPrefix(cmdConfig.WhitelistTables[0], "[") {
+	if len(cmdConfig.WhitelistTables) == 1 && strings.ContainsRune(cmdConfig.WhitelistTables[0], ',') {
 		cmdConfig.WhitelistTables, err = cmd.PersistentFlags().GetStringSlice("whitelist")
 		if err != nil {
 			return err
@@ -153,15 +164,23 @@ func preRun(cmd *cobra.Command, args []string) error {
 	}

 	cmdConfig.Tags = viper.GetStringSlice("tag")
-	if len(cmdConfig.Tags) == 1 && strings.HasPrefix(cmdConfig.Tags[0], "[") {
+	if len(cmdConfig.Tags) == 1 && strings.ContainsRune(cmdConfig.Tags[0], ',') {
 		cmdConfig.Tags, err = cmd.PersistentFlags().GetStringSlice("tag")
 		if err != nil {
 			return err
 		}
 	}

+	cmdConfig.Replacements = viper.GetStringSlice("replace")
+	if len(cmdConfig.Replacements) == 1 && strings.ContainsRune(cmdConfig.Replacements[0], ',') {
+		cmdConfig.Replacements, err = cmd.PersistentFlags().GetStringSlice("replace")
+		if err != nil {
+			return err
+		}
+	}
+
 	if driverName == "postgres" {
-		cmdConfig.Postgres = PostgresConfig{
+		cmdConfig.Postgres = boilingcore.PostgresConfig{
 			User: viper.GetString("postgres.user"),
 			Pass: viper.GetString("postgres.pass"),
 			Host: viper.GetString("postgres.host"),
@@ -183,6 +202,10 @@ func preRun(cmd *cobra.Command, args []string) error {
 		viper.Set("postgres.port", cmdConfig.Postgres.Port)
 	}

+	if len(cmdConfig.Schema) == 0 {
+		cmdConfig.Schema = "public"
+	}
+
 	err = vala.BeginValidation().Validate(
 		vala.StringNotEmpty(cmdConfig.Postgres.User, "postgres.user"),
 		vala.StringNotEmpty(cmdConfig.Postgres.Host, "postgres.host"),
@@ -197,7 +220,7 @@ func preRun(cmd *cobra.Command, args []string) error {
 	}

 	if driverName == "mysql" {
-		cmdConfig.MySQL = MySQLConfig{
+		cmdConfig.MySQL = boilingcore.MySQLConfig{
 			User: viper.GetString("mysql.user"),
 			Pass: viper.GetString("mysql.pass"),
 			Host: viper.GetString("mysql.host"),
@@ -206,6 +229,9 @@ func preRun(cmd *cobra.Command, args []string) error {
 			SSLMode: viper.GetString("mysql.sslmode"),
 		}

+		// Set MySQL TinyintAsBool global var. This flag only applies to MySQL.
+		drivers.TinyintAsBool = viper.GetBool("tinyint-as-bool")
+
 		// MySQL doesn't have schemas, just databases
 		cmdConfig.Schema = cmdConfig.MySQL.DBName

@@ -235,7 +261,47 @@ func preRun(cmd *cobra.Command, args []string) error {
 		}
 	}

-	cmdState, err = New(cmdConfig)
+	if driverName == "mssql" {
+		cmdConfig.MSSQL = boilingcore.MSSQLConfig{
+			User:    viper.GetString("mssql.user"),
+			Pass:    viper.GetString("mssql.pass"),
+			Host:    viper.GetString("mssql.host"),
+			Port:    viper.GetInt("mssql.port"),
+			DBName:  viper.GetString("mssql.dbname"),
+			SSLMode: viper.GetString("mssql.sslmode"),
+		}
+
+		// BUG: https://github.com/spf13/viper/issues/71
+		// Despite setting defaults, nested values don't get defaults
+		// Set them manually
+		if cmdConfig.MSSQL.SSLMode == "" {
+			cmdConfig.MSSQL.SSLMode = "true"
+			viper.Set("mssql.sslmode", cmdConfig.MSSQL.SSLMode)
+		}
+
+		if cmdConfig.MSSQL.Port == 0 {
+			cmdConfig.MSSQL.Port = 1433
+			viper.Set("mssql.port", cmdConfig.MSSQL.Port)
+		}
+
+		if len(cmdConfig.Schema) == 0 {
+			cmdConfig.Schema = "dbo"
+		}
+
+		err = vala.BeginValidation().Validate(
+			vala.StringNotEmpty(cmdConfig.MSSQL.User, "mssql.user"),
+			vala.StringNotEmpty(cmdConfig.MSSQL.Host, "mssql.host"),
+			vala.Not(vala.Equals(cmdConfig.MSSQL.Port, 0, "mssql.port")),
+			vala.StringNotEmpty(cmdConfig.MSSQL.DBName, "mssql.dbname"),
+			vala.StringNotEmpty(cmdConfig.MSSQL.SSLMode, "mssql.sslmode"),
+		).Check()
+
+		if err != nil {
+			return commandFailure(err.Error())
+		}
+	}
+
+	cmdState, err = boilingcore.New(cmdConfig)
 	return err
 }
|
|
||||||
|
|
|
@@ -5,9 +5,9 @@ import (
 	"reflect"
 	"strings"

+	"github.com/lbryio/sqlboiler/boil"
+	"github.com/lbryio/sqlboiler/strmangle"
 	"github.com/pkg/errors"
-	"github.com/vattle/sqlboiler/boil"
-	"github.com/vattle/sqlboiler/strmangle"
 )

 type loadRelationshipState struct {
@@ -206,9 +206,16 @@ func (l loadRelationshipState) loadRelationshipsRecurse(depth int, obj reflect.V
 	}

 	bkind := kindStruct
-	if reflect.Indirect(loadedObject).Kind() != reflect.Struct {
+	if derefed := reflect.Indirect(loadedObject); derefed.Kind() != reflect.Struct {
 		bkind = kindPtrSliceStruct
-		loadedObject = loadedObject.Addr()
+
+		// Convert away any helper slice types
+		// elemType is *elem (from []*elem or helperSliceType)
+		// sliceType is *[]*elem
+		elemType := derefed.Type().Elem()
+		sliceType := reflect.PtrTo(reflect.SliceOf(elemType))
+
+		loadedObject = loadedObject.Addr().Convert(sliceType)
 	}
 	return l.loadRelationships(depth+1, loadedObject.Interface(), bkind)
 }
@@ -241,6 +248,9 @@ func collectLoaded(key string, loadingFrom reflect.Value) (reflect.Value, bindKi
 	if loadedType.Elem().Kind() == reflect.Struct {
 		bkind = kindStruct
 		loadedType = reflect.SliceOf(loadedType)
+	} else {
+		// Ensure that we get rid of all the helper "XSlice" types
+		loadedType = reflect.SliceOf(loadedType.Elem())
 	}

 	collection := reflect.MakeSlice(loadedType, 0, 0)
@@ -249,10 +259,14 @@ func collectLoaded(key string, loadingFrom reflect.Value) (reflect.Value, bindKi
 	for {
 		switch bkind {
 		case kindStruct:
+			if !loadedObject.IsNil() {
 				collection = reflect.Append(collection, loadedObject)
+			}
 		case kindPtrSliceStruct:
+			if !loadedObject.IsNil() {
 				collection = reflect.AppendSlice(collection, loadedObject)
 			}
+		}

 		i++
 		if i >= lnFrom {
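An aside, not part of the diff: a self-contained sketch of the reflect conversion the reworked eager loading relies on. The named helper slice type below (itemSlice) stands in for the generated "XSlice" types; all names are invented for illustration.

    package main

    import (
        "fmt"
        "reflect"
    )

    type item struct{ ID int }

    // itemSlice plays the role of a generated helper type such as VideoSlice.
    type itemSlice []*item

    func main() {
        loaded := itemSlice{{ID: 1}, {ID: 2}}

        derefed := reflect.Indirect(reflect.ValueOf(&loaded))

        // elemType is *item, sliceType is *[]*item
        elemType := derefed.Type().Elem()
        sliceType := reflect.PtrTo(reflect.SliceOf(elemType))

        // *itemSlice and *[]*item share an underlying type, so Convert succeeds
        // and the recursive loader only ever sees plain pointer slices.
        converted := reflect.ValueOf(&loaded).Convert(sliceType)
        fmt.Println(converted.Type()) // *[]*main.item
    }
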
|
||||||
|
|
|
@@ -4,7 +4,7 @@ import (
 	"fmt"
 	"testing"

-	"github.com/vattle/sqlboiler/boil"
+	"github.com/lbryio/sqlboiler/boil"
 )

 var testEagerCounters struct {

@@ -4,7 +4,7 @@ import (
 	"fmt"
 	"reflect"

-	"github.com/vattle/sqlboiler/strmangle"
+	"github.com/lbryio/sqlboiler/strmangle"
 )

 // NonZeroDefaultSet returns the fields included in the

@@ -5,7 +5,7 @@ import (
 	"testing"
 	"time"

-	null "gopkg.in/nullbio/null.v6"
+	null "github.com/lbryio/lbry.go/v2/extras/null"
 )

 type testObj struct {
|
||||||
|
|
|
@@ -1,6 +1,6 @@
 package qm

-import "github.com/vattle/sqlboiler/queries"
+import "github.com/lbryio/sqlboiler/queries"

 // QueryMod to modify the query object
 type QueryMod func(q *queries.Query)
@@ -8,9 +8,11 @@ type QueryMod func(q *queries.Query)
 // Apply the query mods to the Query object
 func Apply(q *queries.Query, mods ...QueryMod) {
 	for _, mod := range mods {
+		if mod != nil {
 			mod(q)
 		}
 	}
+}

 // SQL allows you to execute a plain SQL statement
 func SQL(sql string, args ...interface{}) QueryMod {
@@ -123,6 +125,12 @@ func From(from string) QueryMod {
 	}
 }

+func ForceIndex(index string) QueryMod {
+	return func(q *queries.Query) {
+		queries.SetForceIndex(q, index)
+	}
+}
+
 // Limit the number of returned rows
 func Limit(limit int) QueryMod {
 	return func(q *queries.Query) {
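A hedged usage sketch of the new ForceIndex query mod (not part of the diff). The generated model query function, column, and index name below are hypothetical; ForceIndex only records the index name on the query so the MySQL builder can emit a FORCE INDEX hint.

    videos, err := models.Videos(db,
        qm.ForceIndex("idx_videos_published_at"), // hypothetical index name
        qm.Where("published = ?", true),
        qm.Limit(50),
    ).All()
    if err != nil {
        return err
    }
    _ = videos

Because Apply now skips nil mods, a caller can also pass a conditionally built QueryMod that may be nil without guarding at the call site.
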
|
||||||
|
|
|
@@ -4,7 +4,8 @@ import (
 	"database/sql"
 	"fmt"

-	"github.com/vattle/sqlboiler/boil"
+	"github.com/lbryio/lbry.go/v2/extras/errors"
+	"github.com/lbryio/sqlboiler/boil"
 )

 // joinKind is the type of join
@@ -29,6 +30,7 @@ type Query struct {
 	selectCols []string
 	count      bool
 	from       []string
+	forceindex string
 	joins      []join
 	where      []where
 	in         []in
@@ -52,6 +54,9 @@ type Dialect struct {
 	// Bool flag indicating whether indexed
 	// placeholders ($1) are used, or ? placeholders.
 	IndexPlaceholders bool
+	// Bool flag indicating whether "TOP" or "LIMIT" clause
+	// must be used for rows limitation
+	UseTopClause bool
 }

 type where struct {
@@ -133,7 +138,7 @@ func (q *Query) Query() (*sql.Rows, error) {
 func (q *Query) ExecP() sql.Result {
 	res, err := q.Exec()
 	if err != nil {
-		panic(boil.WrapErr(err))
+		panic(errors.Err(err))
 	}

 	return res
@@ -144,7 +149,7 @@ func (q *Query) ExecP() sql.Result {
 func (q *Query) QueryP() *sql.Rows {
 	rows, err := q.Query()
 	if err != nil {
-		panic(boil.WrapErr(err))
+		panic(errors.Err(err))
 	}

 	return rows
@@ -259,6 +264,11 @@ func SetLastWhereAsOr(q *Query) {
 	q.where[len(q.where)-1].orSeparator = true
 }

+// SetForceIndex sets the index to be used by the query
+func SetForceIndex(q *Query, index string) {
+	q.forceindex = index
+}
+
 // SetLastInAsOr sets the or separator for the tail "IN" in the slice
 func SetLastInAsOr(q *Query) {
 	if len(q.in) == 0 {
|
||||||
|
|
|
@ -7,7 +7,7 @@ import (
|
||||||
"sort"
|
"sort"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/vattle/sqlboiler/strmangle"
|
"github.com/lbryio/sqlboiler/strmangle"
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
|
@ -46,6 +46,12 @@ func buildSelectQuery(q *Query) (*bytes.Buffer, []interface{}) {
|
||||||
|
|
||||||
buf.WriteString("SELECT ")
|
buf.WriteString("SELECT ")
|
||||||
|
|
||||||
|
if q.dialect.UseTopClause {
|
||||||
|
if q.limit != 0 && q.offset == 0 {
|
||||||
|
fmt.Fprintf(buf, " TOP (%d) ", q.limit)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
if q.count {
|
if q.count {
|
||||||
buf.WriteString("COUNT(")
|
buf.WriteString("COUNT(")
|
||||||
}
|
}
|
||||||
|
@ -70,8 +76,14 @@ func buildSelectQuery(q *Query) (*bytes.Buffer, []interface{}) {
|
||||||
buf.WriteByte(')')
|
buf.WriteByte(')')
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if len(q.forceindex) > 0 {
|
||||||
|
fmt.Fprintf(buf, " FROM %s FORCE INDEX (%s)", strings.Join(strmangle.IdentQuoteSlice(q.dialect.LQ, q.dialect.RQ, q.from), ", "),q.forceindex)
|
||||||
|
|
||||||
|
}else{
|
||||||
fmt.Fprintf(buf, " FROM %s", strings.Join(strmangle.IdentQuoteSlice(q.dialect.LQ, q.dialect.RQ, q.from), ", "))
|
fmt.Fprintf(buf, " FROM %s", strings.Join(strmangle.IdentQuoteSlice(q.dialect.LQ, q.dialect.RQ, q.from), ", "))
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
if len(q.joins) > 0 {
|
if len(q.joins) > 0 {
|
||||||
argsLen := len(args)
|
argsLen := len(args)
|
||||||
joinBuf := strmangle.GetBuffer()
|
joinBuf := strmangle.GetBuffer()
|
||||||
|
@ -184,18 +196,23 @@ func buildUpdateQuery(q *Query) (*bytes.Buffer, []interface{}) {
|
||||||
}
|
}
|
||||||
|
|
||||||
// BuildUpsertQueryMySQL builds a SQL statement string using the upsertData provided.
|
// BuildUpsertQueryMySQL builds a SQL statement string using the upsertData provided.
|
||||||
func BuildUpsertQueryMySQL(dia Dialect, tableName string, update, whitelist []string) string {
|
func BuildUpsertQueryMySQL(dia Dialect, tableName string, update, whitelist []string, autoIncrementCol string) string {
|
||||||
whitelist = strmangle.IdentQuoteSlice(dia.LQ, dia.RQ, whitelist)
|
whitelist = strmangle.IdentQuoteSlice(dia.LQ, dia.RQ, whitelist)
|
||||||
|
|
||||||
buf := strmangle.GetBuffer()
|
buf := strmangle.GetBuffer()
|
||||||
defer strmangle.PutBuffer(buf)
|
defer strmangle.PutBuffer(buf)
|
||||||
|
|
||||||
|
var columns string
|
||||||
|
if len(whitelist) != 0 {
|
||||||
|
columns = strings.Join(whitelist, ", ")
|
||||||
|
}
|
||||||
|
|
||||||
if len(update) == 0 {
|
if len(update) == 0 {
|
||||||
fmt.Fprintf(
|
fmt.Fprintf(
|
||||||
buf,
|
buf,
|
||||||
"INSERT IGNORE INTO %s (%s) VALUES (%s)",
|
"INSERT IGNORE INTO %s (%s) VALUES (%s)",
|
||||||
tableName,
|
tableName,
|
||||||
strings.Join(whitelist, ", "),
|
columns,
|
||||||
strmangle.Placeholders(dia.IndexPlaceholders, len(whitelist), 1, 1),
|
strmangle.Placeholders(dia.IndexPlaceholders, len(whitelist), 1, 1),
|
||||||
)
|
)
|
||||||
return buf.String()
|
return buf.String()
|
||||||
|
@@ -205,10 +222,15 @@ func BuildUpsertQueryMySQL(dia Dialect, tableName string, update, whitelist []st
 buf,
 "INSERT INTO %s (%s) VALUES (%s) ON DUPLICATE KEY UPDATE ",
 tableName,
-strings.Join(whitelist, ", "),
+columns,
 strmangle.Placeholders(dia.IndexPlaceholders, len(whitelist), 1, 1),
 )

+// https://stackoverflow.com/questions/778534/mysql-on-duplicate-key-last-insert-id
+if autoIncrementCol != "" {
+buf.WriteString(autoIncrementCol + " = LAST_INSERT_ID(" + autoIncrementCol + "), ")
+}

 for i, v := range update {
 if i != 0 {
 buf.WriteByte(',')
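A hedged sketch of how the extended BuildUpsertQueryMySQL might be called and roughly what it produces; the dialect values, table and column names are illustrative, and the exact form of the per-column update assignments is not shown in this hunk.

```go
// Assumed example values; only the LAST_INSERT_ID clause is guaranteed by the
// change above.
dia := queries.Dialect{LQ: '`', RQ: '`', IndexPlaceholders: false}
stmt := queries.BuildUpsertQueryMySQL(dia, "videos",
	[]string{"title"},       // update on duplicate key
	[]string{"id", "title"}, // insert whitelist
	"id",                    // auto-increment column
)
// stmt begins roughly:
//   INSERT INTO videos (`id`, `title`) VALUES (?, ?)
//   ON DUPLICATE KEY UPDATE id = LAST_INSERT_ID(id), ...
_ = stmt
```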
@@ -232,12 +254,18 @@ func BuildUpsertQueryPostgres(dia Dialect, tableName string, updateOnConflict bo
 buf := strmangle.GetBuffer()
 defer strmangle.PutBuffer(buf)

+columns := "DEFAULT VALUES"
+if len(whitelist) != 0 {
+columns = fmt.Sprintf("(%s) VALUES (%s)",
+strings.Join(whitelist, ", "),
+strmangle.Placeholders(dia.IndexPlaceholders, len(whitelist), 1, 1))
+}

 fmt.Fprintf(
 buf,
-"INSERT INTO %s (%s) VALUES (%s) ON CONFLICT ",
+"INSERT INTO %s %s ON CONFLICT ",
 tableName,
-strings.Join(whitelist, ", "),
+columns,
-strmangle.Placeholders(dia.IndexPlaceholders, len(whitelist), 1, 1),
 )

 if !updateOnConflict || len(update) == 0 {

@@ -266,6 +294,49 @@ func BuildUpsertQueryPostgres(dia Dialect, tableName string, updateOnConflict bo
 return buf.String()
 }

+// BuildUpsertQueryMSSQL builds a SQL statement string using the upsertData provided.
+func BuildUpsertQueryMSSQL(dia Dialect, tableName string, primary, update, insert []string, output []string) string {
+insert = strmangle.IdentQuoteSlice(dia.LQ, dia.RQ, insert)
+
+buf := strmangle.GetBuffer()
+defer strmangle.PutBuffer(buf)
+
+startIndex := 1
+
+fmt.Fprintf(buf, "MERGE INTO %s as [t]\n", tableName)
+fmt.Fprintf(buf, "USING (SELECT %s) as [s] ([%s])\n",
+strmangle.Placeholders(dia.IndexPlaceholders, len(primary), startIndex, 1),
+strings.Join(primary, string(dia.RQ)+","+string(dia.LQ)))
+fmt.Fprint(buf, "ON (")
+for i, v := range primary {
+if i != 0 {
+fmt.Fprint(buf, " AND ")
+}
+fmt.Fprintf(buf, "[s].[%s] = [t].[%s]", v, v)
+}
+fmt.Fprint(buf, ")\n")
+
+startIndex += len(primary)
+
+fmt.Fprint(buf, "WHEN MATCHED THEN ")
+fmt.Fprintf(buf, "UPDATE SET %s\n", strmangle.SetParamNames(string(dia.LQ), string(dia.RQ), startIndex, update))
+
+startIndex += len(update)
+
+fmt.Fprint(buf, "WHEN NOT MATCHED THEN ")
+fmt.Fprintf(buf, "INSERT (%s) VALUES (%s)",
+strings.Join(insert, ", "),
+strmangle.Placeholders(dia.IndexPlaceholders, len(insert), startIndex, 1))
+
+if len(output) > 0 {
+fmt.Fprintf(buf, "\nOUTPUT INSERTED.[%s];", strings.Join(output, "],INSERTED.["))
+} else {
+fmt.Fprint(buf, ";")
+}
+
+return buf.String()
+}

 func writeModifiers(q *Query, buf *bytes.Buffer, args *[]interface{}) {
 if len(q.groupBy) != 0 {
 fmt.Fprintf(buf, " GROUP BY %s", strings.Join(q.groupBy, ", "))
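A sketch of the MERGE statement the new BuildUpsertQueryMSSQL builds, with assumed dialect values and table/column names; the placeholder style depends on dia.IndexPlaceholders.

```go
// Assumed example; only the overall MERGE shape follows from the code above.
dia := queries.Dialect{LQ: '[', RQ: ']', UseTopClause: true}
stmt := queries.BuildUpsertQueryMSSQL(dia, "[dbo].[videos]",
	[]string{"id"},          // primary key columns used to match
	[]string{"title"},       // columns updated when matched
	[]string{"id", "title"}, // columns inserted when not matched
	[]string{"id"},          // OUTPUT INSERTED columns
)
// stmt is roughly:
//   MERGE INTO [dbo].[videos] as [t]
//   USING (SELECT ?) as [s] ([id])
//   ON ([s].[id] = [t].[id])
//   WHEN MATCHED THEN UPDATE SET [title] = ?
//   WHEN NOT MATCHED THEN INSERT ([id], [title]) VALUES (?,?)
//   OUTPUT INSERTED.[id];
_ = stmt
```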
@@ -297,12 +368,37 @@ func writeModifiers(q *Query, buf *bytes.Buffer, args *[]interface{}) {
 buf.WriteString(strings.Join(q.orderBy, ", "))
 }

+if !q.dialect.UseTopClause {
 if q.limit != 0 {
 fmt.Fprintf(buf, " LIMIT %d", q.limit)
 }

 if q.offset != 0 {
 fmt.Fprintf(buf, " OFFSET %d", q.offset)
 }
+} else {
+// From MS SQL 2012 and above: https://technet.microsoft.com/en-us/library/ms188385(v=sql.110).aspx
+// ORDER BY ...
+// OFFSET N ROWS
+// FETCH NEXT M ROWS ONLY
+if q.offset != 0 {
+
+// Hack from https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
+// ...
+// As mentioned, the OFFSET-FETCH filter requires an ORDER BY clause. If you want to use arbitrary order,
+// like TOP without an ORDER BY clause, you can use the trick with ORDER BY (SELECT NULL)
+// ...
+if len(q.orderBy) == 0 {
+buf.WriteString(" ORDER BY (SELECT NULL)")
+}
+
+fmt.Fprintf(buf, " OFFSET %d", q.offset)
+
+if q.limit != 0 {
+fmt.Fprintf(buf, " FETCH NEXT %d ROWS ONLY", q.limit)
+}
+}
+}

 if len(q.forlock) != 0 {
 fmt.Fprintf(buf, " FOR %s", q.forlock)
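A short sketch of the two pagination paths above for an assumed query with limit 10 and offset 20 (model and column names are illustrative).

```go
// Hypothetical paged query; model and column names are assumed.
q := models.Videos(db, qm.OrderBy("id"), qm.Limit(10), qm.Offset(20))
// MySQL/Postgres path (UseTopClause == false): "... ORDER BY id LIMIT 10 OFFSET 20"
// MS SQL path (UseTopClause == true):          "... ORDER BY id OFFSET 20 FETCH NEXT 10 ROWS ONLY"
// and when no ORDER BY was requested the builder injects "ORDER BY (SELECT NULL)" first.
_ = q
```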
@@ -7,9 +7,9 @@ import (
 "strings"
 "sync"

+"github.com/lbryio/sqlboiler/strmangle"
+
 "github.com/pkg/errors"
-"github.com/vattle/sqlboiler/boil"
-"github.com/vattle/sqlboiler/strmangle"
 )

 var (

@@ -41,7 +41,7 @@ const (
 // It panics on error. See boil.Bind() documentation.
 func (q *Query) BindP(obj interface{}) {
 if err := q.Bind(obj); err != nil {
-panic(boil.WrapErr(err))
+panic(errors.WithStack(err))
 }
 }

@@ -4,6 +4,7 @@ package randomize
 import (
 "database/sql"
 "fmt"
+"math"
 "math/rand"
 "reflect"
 "regexp"

@@ -13,12 +14,12 @@ import (
 "sync/atomic"
 "time"

-null "gopkg.in/nullbio/null.v6"
+null "github.com/lbryio/lbry.go/v2/extras/null"

 "github.com/pkg/errors"
 "github.com/satori/go.uuid"
-"github.com/vattle/sqlboiler/strmangle"
+"github.com/lbryio/sqlboiler/strmangle"
-"github.com/vattle/sqlboiler/types"
+"github.com/lbryio/sqlboiler/types"
 )

 var (

@@ -71,7 +72,7 @@ func NewSeed() *Seed {
 }

 func (s *Seed) nextInt() int {
-return int(atomic.AddInt64((*int64)(s), 1))
+return int(atomic.AddInt64((*int64)(s), 1) % math.MaxInt32)
 }

 // Struct gets its fields filled with random data based on the seed.

@@ -501,23 +502,23 @@ func getStructRandValue(s *Seed, typ reflect.Type) interface{} {
 case typeNullFloat64:
 return null.NewFloat64(float64(s.nextInt()%10)/10.0+float64(s.nextInt()%10), true)
 case typeNullInt:
-return null.NewInt(int(int32(s.nextInt())), true)
+return null.NewInt(int(int32(s.nextInt()%math.MaxInt32)), true)
 case typeNullInt8:
-return null.NewInt8(int8(s.nextInt()), true)
+return null.NewInt8(int8(s.nextInt()%math.MaxInt8), true)
 case typeNullInt16:
-return null.NewInt16(int16(s.nextInt()), true)
+return null.NewInt16(int16(s.nextInt()%math.MaxInt16), true)
 case typeNullInt32:
-return null.NewInt32(int32(s.nextInt()), true)
+return null.NewInt32(int32(s.nextInt()%math.MaxInt32), true)
 case typeNullInt64:
 return null.NewInt64(int64(s.nextInt()), true)
 case typeNullUint:
 return null.NewUint(uint(s.nextInt()), true)
 case typeNullUint8:
-return null.NewUint8(uint8(s.nextInt()), true)
+return null.NewUint8(uint8(s.nextInt()%math.MaxUint8), true)
 case typeNullUint16:
-return null.NewUint16(uint16(s.nextInt()), true)
+return null.NewUint16(uint16(s.nextInt()%math.MaxUint16), true)
 case typeNullUint32:
-return null.NewUint32(uint32(s.nextInt()), true)
+return null.NewUint32(uint32(s.nextInt()%math.MaxUint32), true)
 case typeNullUint64:
 return null.NewUint64(uint64(s.nextInt()), true)
 case typeNullBytes:
@@ -590,21 +591,21 @@ func getVariableRandValue(s *Seed, kind reflect.Kind, typ reflect.Type) interfac
 case reflect.Int:
 return s.nextInt()
 case reflect.Int8:
-return int8(s.nextInt())
+return int8(s.nextInt() % math.MaxInt8)
 case reflect.Int16:
-return int16(s.nextInt())
+return int16(s.nextInt() % math.MaxInt16)
 case reflect.Int32:
-return int32(s.nextInt())
+return int32(s.nextInt() % math.MaxInt32)
 case reflect.Int64:
 return int64(s.nextInt())
 case reflect.Uint:
 return uint(s.nextInt())
 case reflect.Uint8:
-return uint8(s.nextInt())
+return uint8(s.nextInt() % math.MaxUint8)
 case reflect.Uint16:
-return uint16(s.nextInt())
+return uint16(s.nextInt() % math.MaxUint16)
 case reflect.Uint32:
-return uint32(s.nextInt())
+return uint32(s.nextInt() % math.MaxUint32)
 case reflect.Uint64:
 return uint64(s.nextInt())
 case reflect.Bool:
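A small illustration of why the modulo guards above matter: without them, the monotonically increasing seed counter eventually wraps when narrowed to a smaller integer type, producing negative or wrapped values in generated test data.

```go
// Sketch, not part of the changeset: narrowing a large counter without a
// bound wraps around, while the modulo keeps it in range.
v := int64(200) // imagine the seed counter has reached 200
fmt.Println(int8(v))                // -> -56 (wrapped)
fmt.Println(int8(v % math.MaxInt8)) // -> 73, always within int8 range
```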
@@ -5,7 +5,7 @@ import (
 "testing"
 "time"

-null "gopkg.in/nullbio/null.v6"
+null "github.com/lbryio/lbry.go/v2/extras/null"
 )

 func TestRandomizeStruct(t *testing.T) {

@@ -92,6 +92,10 @@ func newBoilRuleset() *inflect.Ruleset {
 rs.AddPluralExact("oxen", "oxen", true)
 rs.AddPluralExact("quiz", "quizzes", true)
 rs.AddSingular("s", "")
+rs.AddSingular("ss", "ss")
+rs.AddSingular("as", "as")
+rs.AddSingular("us", "us")
+rs.AddSingular("is", "is")
 rs.AddSingular("news", "news")
 rs.AddSingular("ta", "tum")
 rs.AddSingular("ia", "ium")

@@ -184,5 +188,15 @@ func newBoilRuleset() *inflect.Ruleset {
 rs.AddIrregular("move", "moves")
 rs.AddIrregular("zombie", "zombies")
 rs.AddIrregular("cookie", "cookies")
+rs.AddSingularExact("a", "a", true)
+rs.AddSingularExact("i", "i", true)
+rs.AddSingularExact("is", "is", true)
+rs.AddSingularExact("us", "us", true)
+rs.AddSingularExact("as", "as", true)
+rs.AddPluralExact("a", "a", true)
+rs.AddPluralExact("i", "i", true)
+rs.AddPluralExact("is", "is", true)
+rs.AddPluralExact("us", "us", true)
+rs.AddPluralExact("as", "as", true)
 return rs
 }

@@ -43,6 +43,34 @@ var uppercaseWords = map[string]struct{}{
 "utf8": {},
 }

+var reservedWords = map[string]struct{}{
+"break": {},
+"case": {},
+"chan": {},
+"const": {},
+"continue": {},
+"default": {},
+"defer": {},
+"else": {},
+"fallthrough": {},
+"for": {},
+"func": {},
+"go": {},
+"goto": {},
+"if": {},
+"import": {},
+"interface": {},
+"map": {},
+"package": {},
+"range": {},
+"return": {},
+"select": {},
+"struct": {},
+"switch": {},
+"type": {},
+"var": {},
+}
+
 func init() {
 // Our Boil inflection Ruleset does not include uncountable inflections.
 // This way, people using words like Sheep will not have
@@ -54,10 +82,11 @@ func init() {

 // SchemaTable returns a table name with a schema prefixed if
 // using a database that supports real schemas, for example,
-// for Postgres: "schema_name"."table_name", versus
+// for Postgres: "schema_name"."table_name",
+// for MS SQL: [schema_name].[table_name], versus
 // simply "table_name" for MySQL (because it does not support real schemas)
 func SchemaTable(lq, rq string, driver string, schema string, table string) string {
-if driver == "postgres" && schema != "public" {
+if (driver == "postgres" && schema != "public") || driver == "mssql" {
 return fmt.Sprintf(`%s%s%s.%s%s%s`, lq, schema, rq, lq, table, rq)
 }

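A quick sketch of the new MS SQL branch in SchemaTable; the schema and table names are illustrative.

```go
// Illustrative calls (names assumed):
_ = strmangle.SchemaTable("[", "]", "mssql", "dbo", "videos")       // [dbo].[videos]
_ = strmangle.SchemaTable(`"`, `"`, "postgres", "public", "videos") // "videos" — the public schema stays unprefixed
```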
@@ -492,6 +521,30 @@ func WhereClause(lq, rq string, start int, cols []string) string {
 return buf.String()
 }

+// WhereClauseRepeated returns the where clause repeated with OR clause using start as the $ flag index
+// For example, if start was 2 output would be: "(colthing=$2 AND colstuff=$3) OR (colthing=$4 AND colstuff=$5)"
+func WhereClauseRepeated(lq, rq string, start int, cols []string, count int) string {
+var startIndex int
+buf := GetBuffer()
+defer PutBuffer(buf)
+buf.WriteByte('(')
+for i := 0; i < count; i++ {
+if i != 0 {
+buf.WriteString(") OR (")
+}
+
+startIndex = 0
+if start > 0 {
+startIndex = start + i*len(cols)
+}
+
+buf.WriteString(WhereClause(lq, rq, startIndex, cols))
+}
+buf.WriteByte(')')
+
+return buf.String()
+}

 // JoinSlices merges two string slices of equal length
 func JoinSlices(sep string, a, b []string) []string {
 lna, lnb := len(a), len(b)
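A usage sketch matching the example in the doc comment above; the quote characters and column names are assumed.

```go
clause := strmangle.WhereClauseRepeated(`"`, `"`, 2, []string{"colthing", "colstuff"}, 2)
// clause is roughly: ("colthing"=$2 AND "colstuff"=$3) OR ("colthing"=$4 AND "colstuff"=$5)
_ = clause
```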
@@ -630,3 +683,12 @@ func IsEnumNormal(values []string) bool {
 func ShouldTitleCaseEnum(value string) bool {
 return rgxEnumShouldTitle.MatchString(value)
 }

+// ReplaceReservedWords takes a word and replaces it with word_ if it's found
+// in the list of reserved words.
+func ReplaceReservedWords(word string) string {
+if _, ok := reservedWords[word]; ok {
+return word + "_"
+}
+return word
+}
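A short sketch of the intent: identifiers that collide with Go keywords get a trailing underscore so generated code still compiles (the column names here are illustrative).

```go
_ = strmangle.ReplaceReservedWords("type") // "type_" — a bare "type" would not compile as an identifier
_ = strmangle.ReplaceReservedWords("id")   // "id" — unchanged
```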
@@ -580,3 +580,23 @@ func TestShouldTitleCaseEnum(t *testing.T) {
 }
 }
 }

+func TestReplaceReservedWords(t *testing.T) {
+tests := []struct {
+Word string
+Replace bool
+}{
+{"break", true},
+{"id", false},
+{"type", true},
+}
+
+for i, test := range tests {
+got := ReplaceReservedWords(test.Word)
+if test.Replace && !strings.HasSuffix(got, "_") {
+t.Errorf("%i) want suffixed (%s), got: %s", i, test.Word, got)
+} else if !test.Replace && strings.HasSuffix(got, "_") {
+t.Errorf("%i) want normal (%s), got: %s", i, test.Word, got)
+}
+}
+}
@@ -17,6 +17,23 @@ type {{$modelName}} struct {
 {{end -}}
 }

+var {{$modelName}}Columns = struct {
+{{range $column := .Table.Columns -}}
+{{titleCase $column.Name}} string
+{{end -}}
+}{
+{{range $column := .Table.Columns -}}
+{{titleCase $column.Name}}: "{{$column.Name}}",
+{{end -}}
+}
+
+// {{$modelName}}Filter allows you to filter on any columns by making them all pointers.
+type {{$modelName}}Filter struct {
+{{range $column := .Table.Columns -}}
+{{titleCase $column.Name}} *{{$column.Type}} `{{generateTags $dot.Tags $column.Name}}boil:"{{$column.Name}}" json:"{{$column.Name}},omitempty" toml:"{{$column.Name}}" yaml:"{{$column.Name}},omitempty"`
+{{end -}}
+}

 {{- if .Table.IsJoinTable -}}
 {{- else}}
 // {{$modelNameCamel}}R is where relationships are stored.
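A hedged sketch of what the new template block generates for a hypothetical two-column `videos` table; the actual field types and tags depend on the schema.

```go
// Illustrative generated code (not part of the templates themselves):
var VideoColumns = struct {
	ID    string
	Title string
}{
	ID:    "id",
	Title: "title",
}

// VideoFilter allows you to filter on any columns by making them all pointers.
type VideoFilter struct {
	ID    *uint64 `boil:"id" json:"id,omitempty" toml:"id" yaml:"id,omitempty"`
	Title *string `boil:"title" json:"title,omitempty" toml:"title" yaml:"title,omitempty"`
}
```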
@@ -4,9 +4,13 @@
 {{- $tableNameSingular := .Table.Name | singular | titleCase -}}
 var (
 {{$varNameSingular}}Columns = []string{{"{"}}{{.Table.Columns | columnNames | stringMap .StringFuncs.quoteWrap | join ", "}}{{"}"}}
+{{if eq .DriverName "mssql" -}}
+{{$varNameSingular}}ColumnsWithAuto = []string{{"{"}}{{.Table.Columns | filterColumnsByAuto true | columnNames | stringMap .StringFuncs.quoteWrap | join ","}}{{"}"}}
+{{end -}}
 {{$varNameSingular}}ColumnsWithoutDefault = []string{{"{"}}{{.Table.Columns | filterColumnsByDefault false | columnNames | stringMap .StringFuncs.quoteWrap | join ","}}{{"}"}}
 {{$varNameSingular}}ColumnsWithDefault = []string{{"{"}}{{.Table.Columns | filterColumnsByDefault true | columnNames | stringMap .StringFuncs.quoteWrap | join ","}}{{"}"}}
 {{$varNameSingular}}PrimaryKeyColumns = []string{{"{"}}{{.Table.PKey.Columns | stringMap .StringFuncs.quoteWrap | join ", "}}{{"}"}}
+{{$varNameSingular}}AutoIncrementColumn = "{{.Table.AutoIncrementColumn }}"
 )

 type (

@@ -18,7 +22,7 @@ type (
 {{$tableNameSingular}}Hook func(boil.Executor, *{{$tableNameSingular}}) error
 {{- end}}

-{{$varNameSingular}}Query struct {
+{{$tableNameSingular}}Query struct {
 *queries.Query
 }
 )
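With the rename above the per-table query type becomes exported (for an assumed `videos` table, `videoQuery` becomes `VideoQuery`), so callers can hold query values by name. A minimal sketch:

```go
// Assumed model package and table name; only the exported VideoQuery type
// name follows from the rename above.
func publishedCount(db *sql.DB) (int64, error) {
	var q models.VideoQuery = models.Videos(db, qm.Where("published = ?", true))
	return q.Count()
}
```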
@@ -16,7 +16,7 @@ var {{$varNameSingular}}AfterUpsertHooks []{{$tableNameSingular}}Hook
 func (o *{{$tableNameSingular}}) doBeforeInsertHooks(exec boil.Executor) (err error) {
 for _, hook := range {{$varNameSingular}}BeforeInsertHooks {
 if err := hook(exec, o); err != nil {
-return err
+return errors.Err(err)
 }
 }

@@ -27,7 +27,7 @@ func (o *{{$tableNameSingular}}) doBeforeInsertHooks(exec boil.Executor) (err er
 func (o *{{$tableNameSingular}}) doBeforeUpdateHooks(exec boil.Executor) (err error) {
 for _, hook := range {{$varNameSingular}}BeforeUpdateHooks {
 if err := hook(exec, o); err != nil {
-return err
+return errors.Err(err)
 }
 }

@@ -38,7 +38,7 @@ func (o *{{$tableNameSingular}}) doBeforeUpdateHooks(exec boil.Executor) (err er
 func (o *{{$tableNameSingular}}) doBeforeDeleteHooks(exec boil.Executor) (err error) {
 for _, hook := range {{$varNameSingular}}BeforeDeleteHooks {
 if err := hook(exec, o); err != nil {
-return err
+return errors.Err(err)
 }
 }

@@ -49,7 +49,7 @@ func (o *{{$tableNameSingular}}) doBeforeDeleteHooks(exec boil.Executor) (err er
 func (o *{{$tableNameSingular}}) doBeforeUpsertHooks(exec boil.Executor) (err error) {
 for _, hook := range {{$varNameSingular}}BeforeUpsertHooks {
 if err := hook(exec, o); err != nil {
-return err
+return errors.Err(err)
 }
 }

@@ -60,7 +60,7 @@ func (o *{{$tableNameSingular}}) doBeforeUpsertHooks(exec boil.Executor) (err er
 func (o *{{$tableNameSingular}}) doAfterInsertHooks(exec boil.Executor) (err error) {
 for _, hook := range {{$varNameSingular}}AfterInsertHooks {
 if err := hook(exec, o); err != nil {
-return err
+return errors.Err(err)
 }
 }

@@ -71,7 +71,7 @@ func (o *{{$tableNameSingular}}) doAfterInsertHooks(exec boil.Executor) (err err
 func (o *{{$tableNameSingular}}) doAfterSelectHooks(exec boil.Executor) (err error) {
 for _, hook := range {{$varNameSingular}}AfterSelectHooks {
 if err := hook(exec, o); err != nil {
-return err
+return errors.Err(err)
 }
 }

@@ -82,7 +82,7 @@ func (o *{{$tableNameSingular}}) doAfterSelectHooks(exec boil.Executor) (err err
 func (o *{{$tableNameSingular}}) doAfterUpdateHooks(exec boil.Executor) (err error) {
 for _, hook := range {{$varNameSingular}}AfterUpdateHooks {
 if err := hook(exec, o); err != nil {
-return err
+return errors.Err(err)
 }
 }

@@ -93,7 +93,7 @@ func (o *{{$tableNameSingular}}) doAfterUpdateHooks(exec boil.Executor) (err err
 func (o *{{$tableNameSingular}}) doAfterDeleteHooks(exec boil.Executor) (err error) {
 for _, hook := range {{$varNameSingular}}AfterDeleteHooks {
 if err := hook(exec, o); err != nil {
-return err
+return errors.Err(err)
 }
 }

@@ -104,7 +104,7 @@ func (o *{{$tableNameSingular}}) doAfterDeleteHooks(exec boil.Executor) (err err
 func (o *{{$tableNameSingular}}) doAfterUpsertHooks(exec boil.Executor) (err error) {
 for _, hook := range {{$varNameSingular}}AfterUpsertHooks {
 if err := hook(exec, o); err != nil {
-return err
+return errors.Err(err)
 }
 }

@@ -1,27 +1,27 @@
 {{- $tableNameSingular := .Table.Name | singular | titleCase -}}
 {{- $varNameSingular := .Table.Name | singular | camelCase -}}
-// OneP returns a single {{$varNameSingular}} record from the query, and panics on error.
+// OneP returns a single {{$tableNameSingular}} record from the query, and panics on error.
-func (q {{$varNameSingular}}Query) OneP() (*{{$tableNameSingular}}) {
+func (q {{$tableNameSingular}}Query) OneP() (*{{$tableNameSingular}}) {
 o, err := q.One()
 if err != nil {
-panic(boil.WrapErr(err))
+panic(errors.Err(err))
 }

 return o
 }

-// One returns a single {{$varNameSingular}} record from the query.
+// One returns a single {{$tableNameSingular}} record from the query.
-func (q {{$varNameSingular}}Query) One() (*{{$tableNameSingular}}, error) {
+func (q {{$tableNameSingular}}Query) One() (*{{$tableNameSingular}}, error) {
 o := &{{$tableNameSingular}}{}

 queries.SetLimit(q.Query, 1)

 err := q.Bind(o)
 if err != nil {
-if errors.Cause(err) == sql.ErrNoRows {
+if errors.Is(err, sql.ErrNoRows) {
-return nil, sql.ErrNoRows
+return nil, nil
 }
-return nil, errors.Wrap(err, "{{.PkgName}}: failed to execute a one query for {{.Table.Name}}")
+return nil, errors.Prefix("{{.PkgName}}: failed to execute a one query for {{.Table.Name}}", err)
 }

 {{if not .NoHooks -}}

@@ -34,22 +34,22 @@ func (q {{$varNameSingular}}Query) One() (*{{$tableNameSingular}}, error) {
 }

 // AllP returns all {{$tableNameSingular}} records from the query, and panics on error.
-func (q {{$varNameSingular}}Query) AllP() {{$tableNameSingular}}Slice {
+func (q {{$tableNameSingular}}Query) AllP() {{$tableNameSingular}}Slice {
 o, err := q.All()
 if err != nil {
-panic(boil.WrapErr(err))
+panic(errors.Err(err))
 }

 return o
 }

 // All returns all {{$tableNameSingular}} records from the query.
-func (q {{$varNameSingular}}Query) All() ({{$tableNameSingular}}Slice, error) {
+func (q {{$tableNameSingular}}Query) All() ({{$tableNameSingular}}Slice, error) {
-var o {{$tableNameSingular}}Slice
+var o []*{{$tableNameSingular}}

 err := q.Bind(&o)
 if err != nil {
-return nil, errors.Wrap(err, "{{.PkgName}}: failed to assign all query results to {{$tableNameSingular}} slice")
+return nil, errors.Prefix("{{.PkgName}}: failed to assign all query results to {{$tableNameSingular}} slice", err)
 }

 {{if not .NoHooks -}}

@@ -66,17 +66,17 @@ func (q {{$varNameSingular}}Query) All() ({{$tableNameSingular}}Slice, error) {
 }

 // CountP returns the count of all {{$tableNameSingular}} records in the query, and panics on error.
-func (q {{$varNameSingular}}Query) CountP() int64 {
+func (q {{$tableNameSingular}}Query) CountP() int64 {
 c, err := q.Count()
 if err != nil {
-panic(boil.WrapErr(err))
+panic(errors.Err(err))
 }

 return c
 }

 // Count returns the count of all {{$tableNameSingular}} records in the query.
-func (q {{$varNameSingular}}Query) Count() (int64, error) {
+func (q {{$tableNameSingular}}Query) Count() (int64, error) {
 var count int64

 queries.SetSelect(q.Query, nil)
@@ -84,32 +84,33 @@ func (q {{$varNameSingular}}Query) Count() (int64, error) {

 err := q.Query.QueryRow().Scan(&count)
 if err != nil {
-return 0, errors.Wrap(err, "{{.PkgName}}: failed to count {{.Table.Name}} rows")
+return 0, errors.Prefix("{{.PkgName}}: failed to count {{.Table.Name}} rows", err)
 }

 return count, nil
 }

 // Exists checks if the row exists in the table, and panics on error.
-func (q {{$varNameSingular}}Query) ExistsP() bool {
+func (q {{$tableNameSingular}}Query) ExistsP() bool {
 e, err := q.Exists()
 if err != nil {
-panic(boil.WrapErr(err))
+panic(errors.Err(err))
 }

 return e
 }

 // Exists checks if the row exists in the table.
-func (q {{$varNameSingular}}Query) Exists() (bool, error) {
+func (q {{$tableNameSingular}}Query) Exists() (bool, error) {
 var count int64

 queries.SetCount(q.Query)
+queries.SetSelect(q.Query, []string{})
 queries.SetLimit(q.Query, 1)

 err := q.Query.QueryRow().Scan(&count)
 if err != nil {
-return false, errors.Wrap(err, "{{.PkgName}}: failed to check if {{.Table.Name}} exists")
+return false, errors.Prefix("{{.PkgName}}: failed to check if {{.Table.Name}} exists", err)
 }

 return count > 0, nil
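A hedged usage sketch of the generated finder after the changes above: the query type is exported, and One now returns (nil, nil) when no row matches instead of sql.ErrNoRows. Model and column names are assumed.

```go
// Model and column names assumed; only the nil-on-no-rows behaviour and the
// exported query type come from this changeset.
func findVideo(db *sql.DB, id uint64) (*models.Video, error) {
	video, err := models.Videos(db, qm.Where("id = ?", id)).One()
	if err != nil {
		return nil, err // real query/bind failures only
	}
	if video == nil {
		// no matching row; previously this surfaced as sql.ErrNoRows
	}
	return video, nil
}
```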
@ -3,14 +3,14 @@
|
||||||
{{- $dot := . -}}
|
{{- $dot := . -}}
|
||||||
{{- range .Table.FKeys -}}
|
{{- range .Table.FKeys -}}
|
||||||
{{- $txt := txtsFromFKey $dot.Tables $dot.Table . -}}
|
{{- $txt := txtsFromFKey $dot.Tables $dot.Table . -}}
|
||||||
{{- $varNameSingular := .ForeignTable | singular | camelCase}}
|
{{- $tableNameSingular := .ForeignTable | singular | titleCase}}
|
||||||
// {{$txt.Function.Name}}G pointed to by the foreign key.
|
// {{$txt.Function.Name}}G pointed to by the foreign key.
|
||||||
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}G(mods ...qm.QueryMod) {{$varNameSingular}}Query {
|
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}G(mods ...qm.QueryMod) {{$tableNameSingular}}Query {
|
||||||
return o.{{$txt.Function.Name}}(boil.GetDB(), mods...)
|
return o.{{$txt.Function.Name}}(boil.GetDB(), mods...)
|
||||||
}
|
}
|
||||||
|
|
||||||
// {{$txt.Function.Name}} pointed to by the foreign key.
|
// {{$txt.Function.Name}} pointed to by the foreign key.
|
||||||
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor, mods ...qm.QueryMod) ({{$varNameSingular}}Query) {
|
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor, mods ...qm.QueryMod) ({{$tableNameSingular}}Query) {
|
||||||
queryMods := []qm.QueryMod{
|
queryMods := []qm.QueryMod{
|
||||||
qm.Where("{{$txt.ForeignTable.ColumnName}}=?", o.{{$txt.LocalTable.ColumnNameGo}}),
|
qm.Where("{{$txt.ForeignTable.ColumnName}}=?", o.{{$txt.LocalTable.ColumnNameGo}}),
|
||||||
}
|
}
|
||||||
|
|
|
@ -3,14 +3,14 @@
|
||||||
{{- $dot := . -}}
|
{{- $dot := . -}}
|
||||||
{{- range .Table.ToOneRelationships -}}
|
{{- range .Table.ToOneRelationships -}}
|
||||||
{{- $txt := txtsFromOneToOne $dot.Tables $dot.Table . -}}
|
{{- $txt := txtsFromOneToOne $dot.Tables $dot.Table . -}}
|
||||||
{{- $varNameSingular := .ForeignTable | singular | camelCase}}
|
{{- $tableNameSingular := .ForeignTable | singular | titleCase}}
|
||||||
// {{$txt.Function.Name}}G pointed to by the foreign key.
|
// {{$txt.Function.Name}}G pointed to by the foreign key.
|
||||||
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}G(mods ...qm.QueryMod) {{$varNameSingular}}Query {
|
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}G(mods ...qm.QueryMod) {{$tableNameSingular}}Query {
|
||||||
return o.{{$txt.Function.Name}}(boil.GetDB(), mods...)
|
return o.{{$txt.Function.Name}}(boil.GetDB(), mods...)
|
||||||
}
|
}
|
||||||
|
|
||||||
// {{$txt.Function.Name}} pointed to by the foreign key.
|
// {{$txt.Function.Name}} pointed to by the foreign key.
|
||||||
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor, mods ...qm.QueryMod) ({{$varNameSingular}}Query) {
|
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor, mods ...qm.QueryMod) ({{$tableNameSingular}}Query) {
|
||||||
queryMods := []qm.QueryMod{
|
queryMods := []qm.QueryMod{
|
||||||
qm.Where("{{$txt.ForeignTable.ColumnName}}=?", o.{{$txt.LocalTable.ColumnNameGo}}),
|
qm.Where("{{$txt.ForeignTable.ColumnName}}=?", o.{{$txt.LocalTable.ColumnNameGo}}),
|
||||||
}
|
}
|
||||||
|
|
|
@ -3,20 +3,20 @@
|
||||||
{{- $dot := . -}}
|
{{- $dot := . -}}
|
||||||
{{- $table := .Table -}}
|
{{- $table := .Table -}}
|
||||||
{{- range .Table.ToManyRelationships -}}
|
{{- range .Table.ToManyRelationships -}}
|
||||||
{{- $varNameSingular := .ForeignTable | singular | camelCase -}}
|
{{- $tableNameSingular := .ForeignTable | singular | titleCase -}}
|
||||||
{{- $txt := txtsFromToMany $dot.Tables $table . -}}
|
{{- $txt := txtsFromToMany $dot.Tables $table . -}}
|
||||||
{{- $schemaForeignTable := .ForeignTable | $dot.SchemaTable}}
|
{{- $schemaForeignTable := .ForeignTable | $dot.SchemaTable}}
|
||||||
// {{$txt.Function.Name}}G retrieves all the {{.ForeignTable | singular}}'s {{$txt.ForeignTable.NameHumanReadable}}
|
// {{$txt.Function.Name}}G retrieves all the {{.ForeignTable | singular}}'s {{$txt.ForeignTable.NameHumanReadable}}
|
||||||
{{- if not (eq $txt.Function.Name $txt.ForeignTable.NamePluralGo)}} via {{.ForeignColumn}} column{{- end}}.
|
{{- if not (eq $txt.Function.Name $txt.ForeignTable.NamePluralGo)}} via {{.ForeignColumn}} column{{- end}}.
|
||||||
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}G(mods ...qm.QueryMod) {{$varNameSingular}}Query {
|
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}G(mods ...qm.QueryMod) {{$tableNameSingular}}Query {
|
||||||
return o.{{$txt.Function.Name}}(boil.GetDB(), mods...)
|
return o.{{$txt.Function.Name}}(boil.GetDB(), mods...)
|
||||||
}
|
}
|
||||||
|
|
||||||
// {{$txt.Function.Name}} retrieves all the {{.ForeignTable | singular}}'s {{$txt.ForeignTable.NameHumanReadable}} with an executor
|
// {{$txt.Function.Name}} retrieves all the {{.ForeignTable | singular}}'s {{$txt.ForeignTable.NameHumanReadable}} with an executor
|
||||||
{{- if not (eq $txt.Function.Name $txt.ForeignTable.NamePluralGo)}} via {{.ForeignColumn}} column{{- end}}.
|
{{- if not (eq $txt.Function.Name $txt.ForeignTable.NamePluralGo)}} via {{.ForeignColumn}} column{{- end}}.
|
||||||
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor, mods ...qm.QueryMod) {{$varNameSingular}}Query {
|
func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor, mods ...qm.QueryMod) {{$tableNameSingular}}Query {
|
||||||
queryMods := []qm.QueryMod{
|
queryMods := []qm.QueryMod{
|
||||||
qm.Select("{{id 0 | $dot.Quotes}}.*"),
|
qm.Select("{{$schemaForeignTable}}.*"),
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(mods) != 0 {
|
if len(mods) != 0 {
|
||||||
|
@ -25,17 +25,18 @@ func (o *{{$txt.LocalTable.NameGo}}) {{$txt.Function.Name}}(exec boil.Executor,
|
||||||
|
|
||||||
{{if .ToJoinTable -}}
|
{{if .ToJoinTable -}}
|
||||||
queryMods = append(queryMods,
|
queryMods = append(queryMods,
|
||||||
qm.InnerJoin("{{.JoinTable | $dot.SchemaTable}} as {{id 1 | $dot.Quotes}} on {{id 0 | $dot.Quotes}}.{{.ForeignColumn | $dot.Quotes}} = {{id 1 | $dot.Quotes}}.{{.JoinForeignColumn | $dot.Quotes}}"),
|
{{$schemaJoinTable := .JoinTable | $.SchemaTable -}}
|
||||||
qm.Where("{{id 1 | $dot.Quotes}}.{{.JoinLocalColumn | $dot.Quotes}}=?", o.{{$txt.LocalTable.ColumnNameGo}}),
|
qm.InnerJoin("{{$schemaJoinTable}} on {{$schemaForeignTable}}.{{.ForeignColumn | $dot.Quotes}} = {{$schemaJoinTable}}.{{.JoinForeignColumn | $dot.Quotes}}"),
|
||||||
|
qm.Where("{{$schemaJoinTable}}.{{.JoinLocalColumn | $dot.Quotes}}=?", o.{{$txt.LocalTable.ColumnNameGo}}),
|
||||||
)
|
)
|
||||||
{{else -}}
|
{{else -}}
|
||||||
queryMods = append(queryMods,
|
queryMods = append(queryMods,
|
||||||
qm.Where("{{id 0 | $dot.Quotes}}.{{.ForeignColumn | $dot.Quotes}}=?", o.{{$txt.LocalTable.ColumnNameGo}}),
|
qm.Where("{{$schemaForeignTable}}.{{.ForeignColumn | $dot.Quotes}}=?", o.{{$txt.LocalTable.ColumnNameGo}}),
|
||||||
)
|
)
|
||||||
{{end}}
|
{{end}}
|
||||||
|
|
||||||
query := {{$txt.ForeignTable.NamePluralGo}}(exec, queryMods...)
|
query := {{$txt.ForeignTable.NamePluralGo}}(exec, queryMods...)
|
||||||
queries.SetFrom(query.Query, "{{$schemaForeignTable}} as {{id 0 | $dot.Quotes}}")
|
queries.SetFrom(query.Query, "{{$schemaForeignTable}}")
|
||||||
return query
|
return query
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -4,8 +4,7 @@
|
||||||
{{- range .Table.FKeys -}}
|
{{- range .Table.FKeys -}}
|
||||||
{{- $txt := txtsFromFKey $dot.Tables $dot.Table . -}}
|
{{- $txt := txtsFromFKey $dot.Tables $dot.Table . -}}
|
||||||
{{- $varNameSingular := $dot.Table.Name | singular | camelCase -}}
|
{{- $varNameSingular := $dot.Table.Name | singular | camelCase -}}
|
||||||
{{- $arg := printf "maybe%s" $txt.LocalTable.NameGo -}}
|
{{- $arg := printf "maybe%s" $txt.LocalTable.NameGo}}
|
||||||
{{- $slice := printf "%sSlice" $txt.LocalTable.NameGo}}
|
|
||||||
// Load{{$txt.Function.Name}} allows an eager lookup of values, cached into the
|
// Load{{$txt.Function.Name}} allows an eager lookup of values, cached into the
|
||||||
// loaded structs of the objects.
|
// loaded structs of the objects.
|
||||||
func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singular bool, {{$arg}} interface{}) error {
|
func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singular bool, {{$arg}} interface{}) error {
|
||||||
|
@ -16,7 +15,7 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
|
||||||
if singular {
|
if singular {
|
||||||
object = {{$arg}}.(*{{$txt.LocalTable.NameGo}})
|
object = {{$arg}}.(*{{$txt.LocalTable.NameGo}})
|
||||||
} else {
|
} else {
|
||||||
slice = *{{$arg}}.(*{{$slice}})
|
slice = *{{$arg}}.(*[]*{{$txt.LocalTable.NameGo}})
|
||||||
count = len(slice)
|
count = len(slice)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -46,32 +45,36 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
|
||||||
|
|
||||||
results, err := e.Query(query, args...)
|
results, err := e.Query(query, args...)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return errors.Wrap(err, "failed to eager load {{$txt.ForeignTable.NameGo}}")
|
return errors.Prefix("failed to eager load {{$txt.ForeignTable.NameGo}}", err)
|
||||||
}
|
}
|
||||||
defer results.Close()
|
defer results.Close()
|
||||||
|
|
||||||
var resultSlice []*{{$txt.ForeignTable.NameGo}}
|
var resultSlice []*{{$txt.ForeignTable.NameGo}}
|
||||||
if err = queries.Bind(results, &resultSlice); err != nil {
|
if err = queries.Bind(results, &resultSlice); err != nil {
|
||||||
return errors.Wrap(err, "failed to bind eager loaded slice {{$txt.ForeignTable.NameGo}}")
|
return errors.Prefix("failed to bind eager loaded slice {{$txt.ForeignTable.NameGo}}", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
{{if not $dot.NoHooks -}}
|
{{if not $dot.NoHooks -}}
|
||||||
if len({{$varNameSingular}}AfterSelectHooks) != 0 {
|
if len({{$varNameSingular}}AfterSelectHooks) != 0 {
|
||||||
for _, obj := range resultSlice {
|
for _, obj := range resultSlice {
|
||||||
if err := obj.doAfterSelectHooks(e); err != nil {
|
if err := obj.doAfterSelectHooks(e); err != nil {
|
||||||
return err
|
return errors.Err(err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
{{- end}}
|
{{- end}}
|
||||||
|
|
||||||
if singular && len(resultSlice) != 0 {
|
if len(resultSlice) == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if singular {
|
||||||
object.R.{{$txt.Function.Name}} = resultSlice[0]
|
object.R.{{$txt.Function.Name}} = resultSlice[0]
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, foreign := range resultSlice {
|
|
||||||
for _, local := range slice {
|
for _, local := range slice {
|
||||||
|
for _, foreign := range resultSlice {
|
||||||
{{if $txt.Function.UsesBytes -}}
|
{{if $txt.Function.UsesBytes -}}
|
||||||
if 0 == bytes.Compare(local.{{$txt.Function.LocalAssignment}}, foreign.{{$txt.Function.ForeignAssignment}}) {
|
if 0 == bytes.Compare(local.{{$txt.Function.LocalAssignment}}, foreign.{{$txt.Function.ForeignAssignment}}) {
|
||||||
{{else -}}
|
{{else -}}
|
||||||
|
|
|
@ -5,7 +5,6 @@
|
||||||
{{- $txt := txtsFromOneToOne $dot.Tables $dot.Table . -}}
|
{{- $txt := txtsFromOneToOne $dot.Tables $dot.Table . -}}
|
||||||
{{- $varNameSingular := $dot.Table.Name | singular | camelCase -}}
|
{{- $varNameSingular := $dot.Table.Name | singular | camelCase -}}
|
||||||
{{- $arg := printf "maybe%s" $txt.LocalTable.NameGo -}}
|
{{- $arg := printf "maybe%s" $txt.LocalTable.NameGo -}}
|
||||||
{{- $slice := printf "%sSlice" $txt.LocalTable.NameGo}}
|
|
||||||
// Load{{$txt.Function.Name}} allows an eager lookup of values, cached into the
|
// Load{{$txt.Function.Name}} allows an eager lookup of values, cached into the
|
||||||
// loaded structs of the objects.
|
// loaded structs of the objects.
|
||||||
func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singular bool, {{$arg}} interface{}) error {
|
func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singular bool, {{$arg}} interface{}) error {
|
||||||
|
@ -16,7 +15,7 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
|
||||||
if singular {
|
if singular {
|
||||||
object = {{$arg}}.(*{{$txt.LocalTable.NameGo}})
|
object = {{$arg}}.(*{{$txt.LocalTable.NameGo}})
|
||||||
} else {
|
} else {
|
||||||
slice = *{{$arg}}.(*{{$slice}})
|
slice = *{{$arg}}.(*[]*{{$txt.LocalTable.NameGo}})
|
||||||
count = len(slice)
|
count = len(slice)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -46,32 +45,36 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
|
||||||
|
|
||||||
results, err := e.Query(query, args...)
|
results, err := e.Query(query, args...)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return errors.Wrap(err, "failed to eager load {{$txt.ForeignTable.NameGo}}")
|
return errors.Prefix("failed to eager load {{$txt.ForeignTable.NameGo}}", err)
|
||||||
}
|
}
|
||||||
defer results.Close()
|
defer results.Close()
|
||||||
|
|
||||||
var resultSlice []*{{$txt.ForeignTable.NameGo}}
|
var resultSlice []*{{$txt.ForeignTable.NameGo}}
|
||||||
if err = queries.Bind(results, &resultSlice); err != nil {
|
if err = queries.Bind(results, &resultSlice); err != nil {
|
||||||
return errors.Wrap(err, "failed to bind eager loaded slice {{$txt.ForeignTable.NameGo}}")
|
return errors.Prefix("failed to bind eager loaded slice {{$txt.ForeignTable.NameGo}}", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
{{if not $dot.NoHooks -}}
|
{{if not $dot.NoHooks -}}
|
||||||
if len({{$varNameSingular}}AfterSelectHooks) != 0 {
|
if len({{$varNameSingular}}AfterSelectHooks) != 0 {
|
||||||
for _, obj := range resultSlice {
|
for _, obj := range resultSlice {
|
||||||
if err := obj.doAfterSelectHooks(e); err != nil {
|
if err := obj.doAfterSelectHooks(e); err != nil {
|
||||||
return err
|
return errors.Err(err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
{{- end}}
|
{{- end}}
|
||||||
|
|
||||||
if singular && len(resultSlice) != 0 {
|
if len(resultSlice) == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if singular {
|
||||||
object.R.{{$txt.Function.Name}} = resultSlice[0]
|
object.R.{{$txt.Function.Name}} = resultSlice[0]
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, foreign := range resultSlice {
|
|
||||||
for _, local := range slice {
|
for _, local := range slice {
|
||||||
|
for _, foreign := range resultSlice {
|
||||||
{{if $txt.Function.UsesBytes -}}
|
{{if $txt.Function.UsesBytes -}}
|
||||||
if 0 == bytes.Compare(local.{{$txt.Function.LocalAssignment}}, foreign.{{$txt.Function.ForeignAssignment}}) {
|
if 0 == bytes.Compare(local.{{$txt.Function.LocalAssignment}}, foreign.{{$txt.Function.ForeignAssignment}}) {
|
||||||
{{else -}}
|
{{else -}}
@@ -5,7 +5,6 @@
 {{- $varNameSingular := $dot.Table.Name | singular | camelCase -}}
 {{- $txt := txtsFromToMany $dot.Tables $dot.Table . -}}
 {{- $arg := printf "maybe%s" $txt.LocalTable.NameGo -}}
-{{- $slice := printf "%sSlice" $txt.LocalTable.NameGo -}}
 {{- $schemaForeignTable := .ForeignTable | $dot.SchemaTable}}
 // Load{{$txt.Function.Name}} allows an eager lookup of values, cached into the
 // loaded structs of the objects.
@@ -17,7 +16,7 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
 if singular {
 object = {{$arg}}.(*{{$txt.LocalTable.NameGo}})
 } else {
-slice = *{{$arg}}.(*{{$slice}})
+slice = *{{$arg}}.(*[]*{{$txt.LocalTable.NameGo}})
 count = len(slice)
 }
 
@@ -55,7 +54,7 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
 
 results, err := e.Query(query, args...)
 if err != nil {
-return errors.Wrap(err, "failed to eager load {{.ForeignTable}}")
+return errors.Prefix("failed to eager load {{.ForeignTable}}", err)
 }
 defer results.Close()
 
@@ -71,7 +70,7 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
 
 err = results.Scan({{$foreignTable.Columns | columnNames | stringMap $dot.StringFuncs.titleCase | prefixStringSlice "&one." | join ", "}}, &localJoinCol)
 if err = results.Err(); err != nil {
-return errors.Wrap(err, "failed to plebian-bind eager loaded slice {{.ForeignTable}}")
+return errors.Prefix("failed to plebian-bind eager loaded slice {{.ForeignTable}}", err)
 }
 
 resultSlice = append(resultSlice, one)
@@ -79,11 +78,11 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
 }
 
 if err = results.Err(); err != nil {
-return errors.Wrap(err, "failed to plebian-bind eager loaded slice {{.ForeignTable}}")
+return errors.Prefix("failed to plebian-bind eager loaded slice {{.ForeignTable}}", err)
 }
 {{else -}}
 if err = queries.Bind(results, &resultSlice); err != nil {
-return errors.Wrap(err, "failed to bind eager loaded slice {{.ForeignTable}}")
+return errors.Prefix("failed to bind eager loaded slice {{.ForeignTable}}", err)
 }
 {{end}}
 
@@ -91,7 +90,7 @@ func ({{$varNameSingular}}L) Load{{$txt.Function.Name}}(e boil.Executor, singula
 if len({{.ForeignTable | singular | camelCase}}AfterSelectHooks) != 0 {
 for _, obj := range resultSlice {
 if err := obj.doAfterSelectHooks(e); err != nil {
-return err
+return errors.Err(err)
 }
 }
 }

@@ -6,6 +6,34 @@
 {{- $foreignNameSingular := .ForeignTable | singular | camelCase -}}
 {{- $varNameSingular := .Table | singular | camelCase}}
 {{- $schemaTable := .Table | $dot.SchemaTable}}
+// Set{{$txt.Function.Name}}G of the {{.Table | singular}} to the related item.
+// Sets o.R.{{$txt.Function.Name}} to related.
+// Adds o to related.R.{{$txt.Function.ForeignName}}.
+// Uses the global database handle.
+func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}G(insert bool, related *{{$txt.ForeignTable.NameGo}}) error {
+return o.Set{{$txt.Function.Name}}(boil.GetDB(), insert, related)
+}
+
+// Set{{$txt.Function.Name}}P of the {{.Table | singular}} to the related item.
+// Sets o.R.{{$txt.Function.Name}} to related.
+// Adds o to related.R.{{$txt.Function.ForeignName}}.
+// Panics on error.
+func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}P(exec boil.Executor, insert bool, related *{{$txt.ForeignTable.NameGo}}) {
+if err := o.Set{{$txt.Function.Name}}(exec, insert, related); err != nil {
+panic(errors.Err(err))
+}
+}
+
+// Set{{$txt.Function.Name}}GP of the {{.Table | singular}} to the related item.
+// Sets o.R.{{$txt.Function.Name}} to related.
+// Adds o to related.R.{{$txt.Function.ForeignName}}.
+// Uses the global database handle and panics on error.
+func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}GP(insert bool, related *{{$txt.ForeignTable.NameGo}}) {
+if err := o.Set{{$txt.Function.Name}}(boil.GetDB(), insert, related); err != nil {
+panic(errors.Err(err))
+}
+}
+
 // Set{{$txt.Function.Name}} of the {{.Table | singular}} to the related item.
 // Sets o.R.{{$txt.Function.Name}} to related.
 // Adds o to related.R.{{$txt.Function.ForeignName}}.
@@ -13,7 +41,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}(exec boil.Executo
 var err error
 if insert {
 if err = related.Insert(exec); err != nil {
-return errors.Wrap(err, "failed to insert into foreign table")
+return errors.Prefix("failed to insert into foreign table", err)
 }
 }
 
@@ -30,7 +58,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}(exec boil.Executo
 }
 
 if _, err = exec.Exec(updateQuery, values...); err != nil {
-return errors.Wrap(err, "failed to update local table")
+return errors.Prefix("failed to update local table", err)
 }
 
 o.{{$txt.Function.LocalAssignment}} = related.{{$txt.Function.ForeignAssignment}}
@@ -68,6 +96,34 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}(exec boil.Executo
 }
 
 {{- if .Nullable}}
+// Remove{{$txt.Function.Name}}G relationship.
+// Sets o.R.{{$txt.Function.Name}} to nil.
+// Removes o from all passed in related items' relationships struct (Optional).
+// Uses the global database handle.
+func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}G(related *{{$txt.ForeignTable.NameGo}}) error {
+return o.Remove{{$txt.Function.Name}}(boil.GetDB(), related)
+}
+
+// Remove{{$txt.Function.Name}}P relationship.
+// Sets o.R.{{$txt.Function.Name}} to nil.
+// Removes o from all passed in related items' relationships struct (Optional).
+// Panics on error.
+func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}P(exec boil.Executor, related *{{$txt.ForeignTable.NameGo}}) {
+if err := o.Remove{{$txt.Function.Name}}(exec, related); err != nil {
+panic(errors.Err(err))
+}
+}
+
+// Remove{{$txt.Function.Name}}GP relationship.
+// Sets o.R.{{$txt.Function.Name}} to nil.
+// Removes o from all passed in related items' relationships struct (Optional).
+// Uses the global database handle and panics on error.
+func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}GP(related *{{$txt.ForeignTable.NameGo}}) {
+if err := o.Remove{{$txt.Function.Name}}(boil.GetDB(), related); err != nil {
+panic(errors.Err(err))
+}
+}
+
 // Remove{{$txt.Function.Name}} relationship.
 // Sets o.R.{{$txt.Function.Name}} to nil.
 // Removes o from all passed in related items' relationships struct (Optional).
@@ -77,7 +133,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}(exec boil.Exec
 o.{{$txt.LocalTable.ColumnNameGo}}.Valid = false
 if err = o.Update(exec, "{{.Column}}"); err != nil {
 o.{{$txt.LocalTable.ColumnNameGo}}.Valid = true
-return errors.Wrap(err, "failed to update local table")
+return errors.Prefix("failed to update local table", err)
 }
 
 o.R.{{$txt.Function.Name}} = nil

@@ -7,6 +7,34 @@
 {{- $foreignVarNameSingular := .ForeignTable | singular | camelCase -}}
 {{- $foreignPKeyCols := (getTable $dot.Tables .ForeignTable).PKey.Columns -}}
 {{- $foreignSchemaTable := .ForeignTable | $dot.SchemaTable}}
+// Set{{$txt.Function.Name}}G of the {{.Table | singular}} to the related item.
+// Sets o.R.{{$txt.Function.Name}} to related.
+// Adds o to related.R.{{$txt.Function.ForeignName}}.
+// Uses the global database handle.
+func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}G(insert bool, related *{{$txt.ForeignTable.NameGo}}) error {
+return o.Set{{$txt.Function.Name}}(boil.GetDB(), insert, related)
+}
+
+// Set{{$txt.Function.Name}}P of the {{.Table | singular}} to the related item.
+// Sets o.R.{{$txt.Function.Name}} to related.
+// Adds o to related.R.{{$txt.Function.ForeignName}}.
+// Panics on error.
+func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}P(exec boil.Executor, insert bool, related *{{$txt.ForeignTable.NameGo}}) {
+if err := o.Set{{$txt.Function.Name}}(exec, insert, related); err != nil {
+panic(errors.Err(err))
+}
+}
+
+// Set{{$txt.Function.Name}}GP of the {{.Table | singular}} to the related item.
+// Sets o.R.{{$txt.Function.Name}} to related.
+// Adds o to related.R.{{$txt.Function.ForeignName}}.
+// Uses the global database handle and panics on error.
+func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}GP(insert bool, related *{{$txt.ForeignTable.NameGo}}) {
+if err := o.Set{{$txt.Function.Name}}(boil.GetDB(), insert, related); err != nil {
+panic(errors.Err(err))
+}
+}
+
 // Set{{$txt.Function.Name}} of the {{.Table | singular}} to the related item.
 // Sets o.R.{{$txt.Function.Name}} to related.
 // Adds o to related.R.{{$txt.Function.ForeignName}}.
@@ -20,7 +48,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}(exec boil.Executo
 {{- end}}
 
 if err = related.Insert(exec); err != nil {
-return errors.Wrap(err, "failed to insert into foreign table")
+return errors.Prefix("failed to insert into foreign table", err)
 }
 } else {
 updateQuery := fmt.Sprintf(
@@ -36,7 +64,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}(exec boil.Executo
 }
 
 if _, err = exec.Exec(updateQuery, values...); err != nil {
-return errors.Wrap(err, "failed to update foreign table")
+return errors.Prefix("failed to update foreign table", err)
 }
 
 related.{{$txt.Function.ForeignAssignment}} = o.{{$txt.Function.LocalAssignment}}
@@ -65,6 +93,34 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}(exec boil.Executo
 }
 
 {{- if .ForeignColumnNullable}}
+// Remove{{$txt.Function.Name}}G relationship.
+// Sets o.R.{{$txt.Function.Name}} to nil.
+// Removes o from all passed in related items' relationships struct (Optional).
+// Uses the global database handle.
+func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}G(related *{{$txt.ForeignTable.NameGo}}) error {
+return o.Remove{{$txt.Function.Name}}(boil.GetDB(), related)
+}
+
+// Remove{{$txt.Function.Name}}P relationship.
+// Sets o.R.{{$txt.Function.Name}} to nil.
+// Removes o from all passed in related items' relationships struct (Optional).
+// Panics on error.
+func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}P(exec boil.Executor, related *{{$txt.ForeignTable.NameGo}}) {
+if err := o.Remove{{$txt.Function.Name}}(exec, related); err != nil {
+panic(errors.Err(err))
+}
+}
+
+// Remove{{$txt.Function.Name}}GP relationship.
+// Sets o.R.{{$txt.Function.Name}} to nil.
+// Removes o from all passed in related items' relationships struct (Optional).
+// Uses the global database handle and panics on error.
+func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}GP(related *{{$txt.ForeignTable.NameGo}}) {
+if err := o.Remove{{$txt.Function.Name}}(boil.GetDB(), related); err != nil {
+panic(errors.Err(err))
+}
+}
+
 // Remove{{$txt.Function.Name}} relationship.
 // Sets o.R.{{$txt.Function.Name}} to nil.
 // Removes o from all passed in related items' relationships struct (Optional).
@@ -74,7 +130,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}(exec boil.Exec
 related.{{$txt.ForeignTable.ColumnNameGo}}.Valid = false
 if err = related.Update(exec, "{{.ForeignColumn}}"); err != nil {
 related.{{$txt.ForeignTable.ColumnNameGo}}.Valid = true
-return errors.Wrap(err, "failed to update local table")
+return errors.Prefix("failed to update local table", err)
 }
 
 o.R.{{$txt.Function.Name}} = nil

@@ -6,6 +6,39 @@
 {{- $txt := txtsFromToMany $dot.Tables $table . -}}
 {{- $varNameSingular := .Table | singular | camelCase -}}
 {{- $foreignVarNameSingular := .ForeignTable | singular | camelCase}}
+{{- $foreignPKeyCols := (getTable $dot.Tables .ForeignTable).PKey.Columns -}}
+{{- $foreignSchemaTable := .ForeignTable | $dot.SchemaTable}}
+// Add{{$txt.Function.Name}}G adds the given related objects to the existing relationships
+// of the {{$table.Name | singular}}, optionally inserting them as new records.
+// Appends related to o.R.{{$txt.Function.Name}}.
+// Sets related.R.{{$txt.Function.ForeignName}} appropriately.
+// Uses the global database handle.
+func (o *{{$txt.LocalTable.NameGo}}) Add{{$txt.Function.Name}}G(insert bool, related ...*{{$txt.ForeignTable.NameGo}}) error {
+return o.Add{{$txt.Function.Name}}(boil.GetDB(), insert, related...)
+}
+
+// Add{{$txt.Function.Name}}P adds the given related objects to the existing relationships
+// of the {{$table.Name | singular}}, optionally inserting them as new records.
+// Appends related to o.R.{{$txt.Function.Name}}.
+// Sets related.R.{{$txt.Function.ForeignName}} appropriately.
+// Panics on error.
+func (o *{{$txt.LocalTable.NameGo}}) Add{{$txt.Function.Name}}P(exec boil.Executor, insert bool, related ...*{{$txt.ForeignTable.NameGo}}) {
+if err := o.Add{{$txt.Function.Name}}(exec, insert, related...); err != nil {
+panic(errors.Err(err))
+}
+}
+
+// Add{{$txt.Function.Name}}GP adds the given related objects to the existing relationships
+// of the {{$table.Name | singular}}, optionally inserting them as new records.
+// Appends related to o.R.{{$txt.Function.Name}}.
+// Sets related.R.{{$txt.Function.ForeignName}} appropriately.
+// Uses the global database handle and panics on error.
+func (o *{{$txt.LocalTable.NameGo}}) Add{{$txt.Function.Name}}GP(insert bool, related ...*{{$txt.ForeignTable.NameGo}}) {
+if err := o.Add{{$txt.Function.Name}}(boil.GetDB(), insert, related...); err != nil {
+panic(errors.Err(err))
+}
+}
+
 // Add{{$txt.Function.Name}} adds the given related objects to the existing relationships
 // of the {{$table.Name | singular}}, optionally inserting them as new records.
 // Appends related to o.R.{{$txt.Function.Name}}.
@@ -13,20 +46,38 @@
 func (o *{{$txt.LocalTable.NameGo}}) Add{{$txt.Function.Name}}(exec boil.Executor, insert bool, related ...*{{$txt.ForeignTable.NameGo}}) error {
 var err error
 for _, rel := range related {
+if insert {
 {{if not .ToJoinTable -}}
 rel.{{$txt.Function.ForeignAssignment}} = o.{{$txt.Function.LocalAssignment}}
 {{if .ForeignColumnNullable -}}
 rel.{{$txt.ForeignTable.ColumnNameGo}}.Valid = true
 {{end -}}
 {{end -}}
-if insert {
 if err = rel.Insert(exec); err != nil {
-return errors.Wrap(err, "failed to insert into foreign table")
+return errors.Prefix("failed to insert into foreign table", err)
 }
 }{{if not .ToJoinTable}} else {
-if err = rel.Update(exec, "{{.ForeignColumn}}"); err != nil {
-return errors.Wrap(err, "failed to update foreign table")
+updateQuery := fmt.Sprintf(
+"UPDATE {{$foreignSchemaTable}} SET %s WHERE %s",
+strmangle.SetParamNames("{{$dot.LQ}}", "{{$dot.RQ}}", {{if $dot.Dialect.IndexPlaceholders}}1{{else}}0{{end}}, []string{{"{"}}"{{.ForeignColumn}}"{{"}"}}),
+strmangle.WhereClause("{{$dot.LQ}}", "{{$dot.RQ}}", {{if $dot.Dialect.IndexPlaceholders}}2{{else}}0{{end}}, {{$foreignVarNameSingular}}PrimaryKeyColumns),
+)
+values := []interface{}{o.{{$txt.LocalTable.ColumnNameGo}}, rel.{{$foreignPKeyCols | stringMap $dot.StringFuncs.titleCase | join ", rel."}}{{"}"}}
+
+if boil.DebugMode {
+fmt.Fprintln(boil.DebugWriter, updateQuery)
+fmt.Fprintln(boil.DebugWriter, values)
 }
+
+if _, err = exec.Exec(updateQuery, values...); err != nil {
+return errors.Prefix("failed to update foreign table", err)
+}
+
+rel.{{$txt.Function.ForeignAssignment}} = o.{{$txt.Function.LocalAssignment}}
+{{if .ForeignColumnNullable -}}
+rel.{{$txt.ForeignTable.ColumnNameGo}}.Valid = true
+{{end -}}
 }{{end -}}
 }
 
@@ -42,7 +93,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Add{{$txt.Function.Name}}(exec boil.Executo
 
 _, err = exec.Exec(query, values...)
 if err != nil {
-return errors.Wrap(err, "failed to insert into join table")
+return errors.Prefix("failed to insert into join table", err)
 }
 }
 {{end -}}
@@ -81,6 +132,43 @@ func (o *{{$txt.LocalTable.NameGo}}) Add{{$txt.Function.Name}}(exec boil.Executo
 }
 
 {{- if (or .ForeignColumnNullable .ToJoinTable)}}
+// Set{{$txt.Function.Name}}G removes all previously related items of the
+// {{$table.Name | singular}} replacing them completely with the passed
+// in related items, optionally inserting them as new records.
+// Sets o.R.{{$txt.Function.ForeignName}}'s {{$txt.Function.Name}} accordingly.
+// Replaces o.R.{{$txt.Function.Name}} with related.
+// Sets related.R.{{$txt.Function.ForeignName}}'s {{$txt.Function.Name}} accordingly.
+// Uses the global database handle.
+func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}G(insert bool, related ...*{{$txt.ForeignTable.NameGo}}) error {
+return o.Set{{$txt.Function.Name}}(boil.GetDB(), insert, related...)
+}
+
+// Set{{$txt.Function.Name}}P removes all previously related items of the
+// {{$table.Name | singular}} replacing them completely with the passed
+// in related items, optionally inserting them as new records.
+// Sets o.R.{{$txt.Function.ForeignName}}'s {{$txt.Function.Name}} accordingly.
+// Replaces o.R.{{$txt.Function.Name}} with related.
+// Sets related.R.{{$txt.Function.ForeignName}}'s {{$txt.Function.Name}} accordingly.
+// Panics on error.
+func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}P(exec boil.Executor, insert bool, related ...*{{$txt.ForeignTable.NameGo}}) {
+if err := o.Set{{$txt.Function.Name}}(exec, insert, related...); err != nil {
+panic(errors.Err(err))
+}
+}
+
+// Set{{$txt.Function.Name}}GP removes all previously related items of the
+// {{$table.Name | singular}} replacing them completely with the passed
+// in related items, optionally inserting them as new records.
+// Sets o.R.{{$txt.Function.ForeignName}}'s {{$txt.Function.Name}} accordingly.
+// Replaces o.R.{{$txt.Function.Name}} with related.
+// Sets related.R.{{$txt.Function.ForeignName}}'s {{$txt.Function.Name}} accordingly.
+// Uses the global database handle and panics on error.
+func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}GP(insert bool, related ...*{{$txt.ForeignTable.NameGo}}) {
+if err := o.Set{{$txt.Function.Name}}(boil.GetDB(), insert, related...); err != nil {
+panic(errors.Err(err))
+}
+}
+
 // Set{{$txt.Function.Name}} removes all previously related items of the
 // {{$table.Name | singular}} replacing them completely with the passed
 // in related items, optionally inserting them as new records.
@@ -102,12 +190,14 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}(exec boil.Executo
 
 _, err := exec.Exec(query, values...)
 if err != nil {
-return errors.Wrap(err, "failed to remove relationships before set")
+return errors.Prefix("failed to remove relationships before set", err)
 }
 
 {{if .ToJoinTable -}}
 remove{{$txt.Function.Name}}From{{$txt.Function.ForeignName}}Slice(o, related)
+if o.R != nil {
 o.R.{{$txt.Function.Name}} = nil
+}
 {{else -}}
 if o.R != nil {
 for _, rel := range o.R.{{$txt.Function.Name}} {
@@ -126,6 +216,34 @@ func (o *{{$txt.LocalTable.NameGo}}) Set{{$txt.Function.Name}}(exec boil.Executo
 return o.Add{{$txt.Function.Name}}(exec, insert, related...)
 }
 
+// Remove{{$txt.Function.Name}}G relationships from objects passed in.
+// Removes related items from R.{{$txt.Function.Name}} (uses pointer comparison, removal does not keep order)
+// Sets related.R.{{$txt.Function.ForeignName}}.
+// Uses the global database handle.
+func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}G(related ...*{{$txt.ForeignTable.NameGo}}) error {
+return o.Remove{{$txt.Function.Name}}(boil.GetDB(), related...)
+}
+
+// Remove{{$txt.Function.Name}}P relationships from objects passed in.
+// Removes related items from R.{{$txt.Function.Name}} (uses pointer comparison, removal does not keep order)
+// Sets related.R.{{$txt.Function.ForeignName}}.
+// Panics on error.
+func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}P(exec boil.Executor, related ...*{{$txt.ForeignTable.NameGo}}) {
+if err := o.Remove{{$txt.Function.Name}}(exec, related...); err != nil {
+panic(errors.Err(err))
+}
+}
+
+// Remove{{$txt.Function.Name}}GP relationships from objects passed in.
+// Removes related items from R.{{$txt.Function.Name}} (uses pointer comparison, removal does not keep order)
+// Sets related.R.{{$txt.Function.ForeignName}}.
+// Uses the global database handle and panics on error.
+func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}GP(related ...*{{$txt.ForeignTable.NameGo}}) {
+if err := o.Remove{{$txt.Function.Name}}(boil.GetDB(), related...); err != nil {
+panic(errors.Err(err))
+}
+}
+
 // Remove{{$txt.Function.Name}} relationships from objects passed in.
 // Removes related items from R.{{$txt.Function.Name}} (uses pointer comparison, removal does not keep order)
 // Sets related.R.{{$txt.Function.ForeignName}}.
@@ -134,9 +252,12 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}(exec boil.Exec
 {{if .ToJoinTable -}}
 query := fmt.Sprintf(
 "delete from {{.JoinTable | $dot.SchemaTable}} where {{.JoinLocalColumn | $dot.Quotes}} = {{if $dot.Dialect.IndexPlaceholders}}$1{{else}}?{{end}} and {{.JoinForeignColumn | $dot.Quotes}} in (%s)",
-strmangle.Placeholders(dialect.IndexPlaceholders, len(related), 1, 1),
+strmangle.Placeholders(dialect.IndexPlaceholders, len(related), 2, 1),
 )
 values := []interface{}{{"{"}}o.{{$txt.LocalTable.ColumnNameGo}}}
+for _, rel := range related {
+values = append(values, rel.{{$txt.ForeignTable.ColumnNameGo}})
+}
 
 if boil.DebugMode {
 fmt.Fprintln(boil.DebugWriter, query)
@@ -145,7 +266,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}(exec boil.Exec
 
 _, err = exec.Exec(query, values...)
 if err != nil {
-return errors.Wrap(err, "failed to remove relationships before set")
+return errors.Prefix("failed to remove relationships before set", err)
 }
 {{else -}}
 for _, rel := range related {
@@ -156,7 +277,7 @@ func (o *{{$txt.LocalTable.NameGo}}) Remove{{$txt.Function.Name}}(exec boil.Exec
 }
 {{end -}}
 if err = rel.Update(exec, "{{.ForeignColumn}}"); err != nil {
-return err
+return errors.Err(err)
 }
 }
 {{end -}}

@@ -1,12 +1,12 @@
 {{- $tableNamePlural := .Table.Name | plural | titleCase -}}
-{{- $varNameSingular := .Table.Name | singular | camelCase}}
+{{- $tableNameSingular := .Table.Name | singular | titleCase}}
 // {{$tableNamePlural}}G retrieves all records.
-func {{$tableNamePlural}}G(mods ...qm.QueryMod) {{$varNameSingular}}Query {
+func {{$tableNamePlural}}G(mods ...qm.QueryMod) {{$tableNameSingular}}Query {
 return {{$tableNamePlural}}(boil.GetDB(), mods...)
 }
 
 // {{$tableNamePlural}} retrieves all the records using an executor.
-func {{$tableNamePlural}}(exec boil.Executor, mods ...qm.QueryMod) {{$varNameSingular}}Query {
+func {{$tableNamePlural}}(exec boil.Executor, mods ...qm.QueryMod) {{$tableNameSingular}}Query {
 mods = append(mods, qm.From("{{.Table.Name | .SchemaTable}}"))
-return {{$varNameSingular}}Query{NewQuery(exec, mods...)}
+return {{$tableNameSingular}}Query{NewQuery(exec, mods...)}
 }

@@ -1,7 +1,7 @@
 {{- $tableNameSingular := .Table.Name | singular | titleCase -}}
 {{- $varNameSingular := .Table.Name | singular | camelCase -}}
 {{- $colDefs := sqlColDefinitions .Table.Columns .Table.PKey.Columns -}}
-{{- $pkNames := $colDefs.Names | stringMap .StringFuncs.camelCase -}}
+{{- $pkNames := $colDefs.Names | stringMap .StringFuncs.camelCase | stringMap .StringFuncs.replaceReserved -}}
 {{- $pkArgs := joinSlices " " $pkNames $colDefs.Types | join ", "}}
 // Find{{$tableNameSingular}}G retrieves a single record by ID.
 func Find{{$tableNameSingular}}G({{$pkArgs}}, selectCols ...string) (*{{$tableNameSingular}}, error) {
@@ -12,7 +12,7 @@ func Find{{$tableNameSingular}}G({{$pkArgs}}, selectCols ...string) (*{{$tableNa
 func Find{{$tableNameSingular}}GP({{$pkArgs}}, selectCols ...string) *{{$tableNameSingular}} {
 retobj, err := Find{{$tableNameSingular}}(boil.GetDB(), {{$pkNames | join ", "}}, selectCols...)
 if err != nil {
-panic(boil.WrapErr(err))
+panic(errors.Err(err))
 }
 
 return retobj
@@ -35,10 +35,10 @@ func Find{{$tableNameSingular}}(exec boil.Executor, {{$pkArgs}}, selectCols ...s
 
 err := q.Bind({{$varNameSingular}}Obj)
 if err != nil {
-if errors.Cause(err) == sql.ErrNoRows {
-return nil, sql.ErrNoRows
+if errors.Is(err, sql.ErrNoRows) {
+return nil, nil
 }
-return nil, errors.Wrap(err, "{{.PkgName}}: unable to select from {{.Table.Name}}")
+return nil, errors.Prefix("{{.PkgName}}: unable to select from {{.Table.Name}}", err)
 }
 
 return {{$varNameSingular}}Obj, nil
@@ -48,8 +48,79 @@ func Find{{$tableNameSingular}}(exec boil.Executor, {{$pkArgs}}, selectCols ...s
 func Find{{$tableNameSingular}}P(exec boil.Executor, {{$pkArgs}}, selectCols ...string) *{{$tableNameSingular}} {
 retobj, err := Find{{$tableNameSingular}}(exec, {{$pkNames | join ", "}}, selectCols...)
 if err != nil {
-panic(boil.WrapErr(err))
+panic(errors.Err(err))
 }
 
 return retobj
 }
+
+// FindOne{{$tableNameSingular}} retrieves a single record using filters.
+func FindOne{{$tableNameSingular}}(exec boil.Executor, filters {{$tableNameSingular}}Filter) (*{{$tableNameSingular}}, error) {
+obj := &{{$tableNameSingular}}{}
+
+err := {{$tableNameSingular}}NewQuery(exec).
+Where(filters).
+Limit(1).
+Bind(obj)
+
+if err != nil {
+if errors.Is(err, sql.ErrNoRows) {
+return nil, nil
+}
+return nil, errors.Prefix("{{.PkgName}}: unable to select from {{.Table.Name}}", err)
+}
+
+return obj, nil
+}
+
+// FindOne{{$tableNameSingular}}G retrieves a single record using filters.
+func FindOne{{$tableNameSingular}}G(filters {{$tableNameSingular}}Filter) (*{{$tableNameSingular}}, error) {
+return FindOne{{$tableNameSingular}}(boil.GetDB(), filters)
+}
+
+// FindOne{{$tableNameSingular}}OrInit retrieves a single record using filters, or initializes a new record if one is not found.
+func FindOne{{$tableNameSingular}}OrInit(exec boil.Executor, filters {{$tableNameSingular}}Filter) (*{{$tableNameSingular}}, error) {
+{{$varNameSingular}}Obj, err := FindOne{{$tableNameSingular}}(exec, filters)
+if err != nil {
+return nil, err
+}
+
+if {{$varNameSingular}}Obj == nil {
+{{$varNameSingular}}Obj = &{{$tableNameSingular}}{}
+objR := reflect.ValueOf({{$varNameSingular}}Obj).Elem()
+r := reflect.ValueOf(filters)
+for i := 0; i < r.NumField(); i++ {
+f := r.Field(i)
+if f.Elem().IsValid() {
+objR.FieldByName(r.Type().Field(i).Name).Set(f.Elem())
+}
+}
+}
+
+return {{$varNameSingular}}Obj, nil
+}
+
+// FindOne{{$tableNameSingular}}OrInit retrieves a single record using filters, or initializes a new record if one is not found.
+func FindOne{{$tableNameSingular}}OrInitG(filters {{$tableNameSingular}}Filter) (*{{$tableNameSingular}}, error) {
+return FindOne{{$tableNameSingular}}OrInit(boil.GetDB(), filters)
+}
+
+// FindOne{{$tableNameSingular}}OrInit retrieves a single record using filters, or initializes and inserts a new record if one is not found.
+func FindOne{{$tableNameSingular}}OrCreate(exec boil.Executor, filters {{$tableNameSingular}}Filter) (*{{$tableNameSingular}}, error) {
+{{$varNameSingular}}Obj, err := FindOne{{$tableNameSingular}}OrInit(exec, filters)
+if err != nil {
+return nil, err
+}
+if {{$varNameSingular}}Obj.IsNew() {
+err := {{$varNameSingular}}Obj.Insert(exec)
+if err != nil {
+return nil, err
+}
+}
+return {{$varNameSingular}}Obj, nil
+}
+
+// FindOne{{$tableNameSingular}}OrInit retrieves a single record using filters, or initializes and inserts a new record if one is not found.
+func FindOne{{$tableNameSingular}}OrCreateG(filters {{$tableNameSingular}}Filter) (*{{$tableNameSingular}}, error) {
+return FindOne{{$tableNameSingular}}OrCreate(boil.GetDB(), filters)
+}

@@ -10,7 +10,7 @@ func (o *{{$tableNameSingular}}) InsertG(whitelist ... string) error {
 // behavior description.
 func (o *{{$tableNameSingular}}) InsertGP(whitelist ... string) {
 if err := o.Insert(boil.GetDB(), whitelist...); err != nil {
-panic(boil.WrapErr(err))
+panic(errors.Err(err))
 }
 }
 
@@ -18,7 +18,7 @@ func (o *{{$tableNameSingular}}) InsertGP(whitelist ... string) {
 // for whitelist behavior description.
 func (o *{{$tableNameSingular}}) InsertP(exec boil.Executor, whitelist ... string) {
 if err := o.Insert(exec, whitelist...); err != nil {
-panic(boil.WrapErr(err))
+panic(errors.Err(err))
 }
 }
 
@@ -29,7 +29,7 @@ func (o *{{$tableNameSingular}}) InsertP(exec boil.Executor, whitelist ... strin
 // - All columns with a default, but non-zero are included (i.e. health = 75)
 func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string) error {
 if o == nil {
-return errors.New("{{.PkgName}}: no {{.Table.Name}} provided for insertion")
+return errors.Err("{{.PkgName}}: no {{.Table.Name}} provided for insertion")
 }
 
 var err error
@@ -37,7 +37,7 @@ func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string
 
 {{if not .NoHooks -}}
 if err := o.doBeforeInsertHooks(exec); err != nil {
-return err
+return errors.Err(err)
 }
 {{- end}}
 
@@ -59,20 +59,38 @@ func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string
 
 cache.valueMapping, err = queries.BindMapping({{$varNameSingular}}Type, {{$varNameSingular}}Mapping, wl)
 if err != nil {
-return err
+return errors.Err(err)
 }
 cache.retMapping, err = queries.BindMapping({{$varNameSingular}}Type, {{$varNameSingular}}Mapping, returnColumns)
 if err != nil {
-return err
+return errors.Err(err)
 }
-cache.query = fmt.Sprintf("INSERT INTO {{$schemaTable}} ({{.LQ}}%s{{.RQ}}) VALUES (%s)", strings.Join(wl, "{{.LQ}},{{.RQ}}"), strmangle.Placeholders(dialect.IndexPlaceholders, len(wl), 1, 1))
+if len(wl) != 0 {
+cache.query = fmt.Sprintf("INSERT INTO {{$schemaTable}} ({{.LQ}}%s{{.RQ}}) %%sVALUES (%s)%%s", strings.Join(wl, "{{.RQ}},{{.LQ}}"), strmangle.Placeholders(dialect.IndexPlaceholders, len(wl), 1, 1))
+} else {
+{{if eq .DriverName "mysql" -}}
+cache.query = "INSERT INTO {{$schemaTable}} () VALUES ()"
+{{else -}}
+cache.query = "INSERT INTO {{$schemaTable}} DEFAULT VALUES"
+{{end -}}
+}
+
+var queryOutput, queryReturning string
+
 if len(cache.retMapping) != 0 {
 {{if .UseLastInsertID -}}
-cache.retQuery = fmt.Sprintf("SELECT {{.LQ}}%s{{.RQ}} FROM {{$schemaTable}} WHERE %s", strings.Join(returnColumns, "{{.LQ}},{{.RQ}}"), strmangle.WhereClause("{{.LQ}}", "{{.RQ}}", {{if .Dialect.IndexPlaceholders}}1{{else}}0{{end}}, {{$varNameSingular}}PrimaryKeyColumns))
+cache.retQuery = fmt.Sprintf("SELECT {{.LQ}}%s{{.RQ}} FROM {{$schemaTable}} WHERE %s", strings.Join(returnColumns, "{{.RQ}},{{.LQ}}"), strmangle.WhereClause("{{.LQ}}", "{{.RQ}}", {{if .Dialect.IndexPlaceholders}}1{{else}}0{{end}}, {{$varNameSingular}}PrimaryKeyColumns))
 {{else -}}
-cache.query += fmt.Sprintf(" RETURNING {{.LQ}}%s{{.RQ}}", strings.Join(returnColumns, "{{.LQ}},{{.RQ}}"))
+{{if ne .DriverName "mssql" -}}
+queryReturning = fmt.Sprintf(" RETURNING {{.LQ}}%s{{.RQ}}", strings.Join(returnColumns, "{{.RQ}},{{.LQ}}"))
+{{else -}}
+queryOutput = fmt.Sprintf("OUTPUT INSERTED.{{.LQ}}%s{{.RQ}} ", strings.Join(returnColumns, "{{.RQ}},INSERTED.{{.LQ}}"))
 {{end -}}
+{{end -}}
 }
 
+if len(wl) != 0 {
+cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning)
 }
 }
 
@@ -92,7 +110,7 @@ func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string
 _, err = exec.Exec(cache.query, vals...)
 {{- end}}
 if err != nil {
-return errors.Wrap(err, "{{.PkgName}}: unable to insert into {{.Table.Name}}")
+return errors.Prefix("{{.PkgName}}: unable to insert into {{.Table.Name}}", err)
 }
 
 {{if $canLastInsertID -}}
@@ -107,7 +125,7 @@ func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string
 {{if $canLastInsertID -}}
 lastID, err = result.LastInsertId()
 if err != nil {
-return ErrSyncFail
+return errors.Err(ErrSyncFail)
 }
 
 {{$colName := index .Table.PKey.Columns 0 -}}
@@ -132,7 +150,7 @@ func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string
 
 err = exec.QueryRow(cache.retQuery, identifierCols...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...)
 if err != nil {
-return errors.Wrap(err, "{{.PkgName}}: unable to populate default values for {{.Table.Name}}")
+return errors.Prefix("{{.PkgName}}: unable to populate default values for {{.Table.Name}}", err)
 }
 {{else}}
 if len(cache.retMapping) != 0 {
@@ -142,7 +160,7 @@ func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string
 }
 
 if err != nil {
-return errors.Wrap(err, "{{.PkgName}}: unable to insert into {{.Table.Name}}")
+return errors.Prefix("{{.PkgName}}: unable to insert into {{.Table.Name}}", err)
 }
 {{end}}
 

@@ -12,7 +12,7 @@ func (o *{{$tableNameSingular}}) UpdateG(whitelist ...string) error {
 // Panics on error. See Update for whitelist behavior description.
 func (o *{{$tableNameSingular}}) UpdateGP(whitelist ...string) {
 if err := o.Update(boil.GetDB(), whitelist...); err != nil {
-panic(boil.WrapErr(err))
+panic(errors.Err(err))
 }
 }
 
@@ -21,7 +21,7 @@ func (o *{{$tableNameSingular}}) UpdateGP(whitelist ...string) {
 func (o *{{$tableNameSingular}}) UpdateP(exec boil.Executor, whitelist ... string) {
 err := o.Update(exec, whitelist...)
 if err != nil {
-panic(boil.WrapErr(err))
+panic(errors.Err(err))
 }
 }
 
@@ -38,7 +38,7 @@ func (o *{{$tableNameSingular}}) Update(exec boil.Executor, whitelist ... string
 var err error
 {{if not .NoHooks -}}
 if err = o.doBeforeUpdateHooks(exec); err != nil {
-return err
+return errors.Err(err)
 }
 {{end -}}
 
@@ -48,9 +48,19 @@ func (o *{{$tableNameSingular}}) Update(exec boil.Executor, whitelist ... string
 {{$varNameSingular}}UpdateCacheMut.RUnlock()
 
 if !cached {
-wl := strmangle.UpdateColumnSet({{$varNameSingular}}Columns, {{$varNameSingular}}PrimaryKeyColumns, whitelist)
+wl := strmangle.UpdateColumnSet(
+{{$varNameSingular}}Columns,
+{{$varNameSingular}}PrimaryKeyColumns,
+whitelist,
+)
+{{if eq .DriverName "mssql"}}
+wl = strmangle.SetComplement(wl, {{$varNameSingular}}ColumnsWithAuto)
+{{end}}
+if len(whitelist) == 0 {
+wl = strmangle.SetComplement(wl, []string{"created_at","updated_at"})
+}
 if len(wl) == 0 {
-return errors.New("{{.PkgName}}: unable to update {{.Table.Name}}, could not build whitelist")
+return errors.Err("{{.PkgName}}: unable to update {{.Table.Name}}, could not build whitelist")
 }
 
 cache.query = fmt.Sprintf("UPDATE {{$schemaTable}} SET %s WHERE %s",
@@ -59,7 +69,7 @@ func (o *{{$tableNameSingular}}) Update(exec boil.Executor, whitelist ... string
 )
 cache.valueMapping, err = queries.BindMapping({{$varNameSingular}}Type, {{$varNameSingular}}Mapping, append(wl, {{$varNameSingular}}PrimaryKeyColumns...))
 if err != nil {
-return err
+return errors.Err(err)
 }
 }
 
@@ -72,7 +82,7 @@ func (o *{{$tableNameSingular}}) Update(exec boil.Executor, whitelist ... string
 
 _, err = exec.Exec(cache.query, values...)
 if err != nil {
-return errors.Wrap(err, "{{.PkgName}}: unable to update {{.Table.Name}} row")
+return errors.Prefix("{{.PkgName}}: unable to update {{.Table.Name}} row", err)
 }
 
 if !cached {
@@ -89,19 +99,19 @@ func (o *{{$tableNameSingular}}) Update(exec boil.Executor, whitelist ... string
 }
 
 // UpdateAllP updates all rows with matching column names, and panics on error.
-func (q {{$varNameSingular}}Query) UpdateAllP(cols M) {
+func (q {{$tableNameSingular}}Query) UpdateAllP(cols M) {
 if err := q.UpdateAll(cols); err != nil {
-panic(boil.WrapErr(err))
+panic(errors.Err(err))
 }
 }
 
 // UpdateAll updates all rows with the specified column values.
-func (q {{$varNameSingular}}Query) UpdateAll(cols M) error {
+func (q {{$tableNameSingular}}Query) UpdateAll(cols M) error {
 queries.SetUpdate(q.Query, cols)
 
 _, err := q.Query.Exec()
 if err != nil {
-return errors.Wrap(err, "{{.PkgName}}: unable to update all for {{.Table.Name}}")
+return errors.Prefix("{{.PkgName}}: unable to update all for {{.Table.Name}}", err)
 }
 
 return nil
@@ -115,14 +125,14 @@ func (o {{$tableNameSingular}}Slice) UpdateAllG(cols M) error {
 // UpdateAllGP updates all rows with the specified column values, and panics on error.
 func (o {{$tableNameSingular}}Slice) UpdateAllGP(cols M) {
 if err := o.UpdateAll(boil.GetDB(), cols); err != nil {
-panic(boil.WrapErr(err))
+panic(errors.Err(err))
 }
 }
 
 // UpdateAllP updates all rows with the specified column values, and panics on error.
 func (o {{$tableNameSingular}}Slice) UpdateAllP(exec boil.Executor, cols M) {
 if err := o.UpdateAll(exec, cols); err != nil {
-panic(boil.WrapErr(err))
+panic(errors.Err(err))
 }
 }
 
@@ -134,7 +144,7 @@ func (o {{$tableNameSingular}}Slice) UpdateAll(exec boil.Executor, cols M) error
 }
 
 if len(cols) == 0 {
-return errors.New("{{.PkgName}}: update all requires at least one column argument")
+return errors.Err("{{.PkgName}}: update all requires at least one column argument")
 }
 
 colNames := make([]string, len(cols))
@@ -153,11 +163,9 @@ func (o {{$tableNameSingular}}Slice) UpdateAll(exec boil.Executor, cols M) error
 args = append(args, pkeyArgs...)
 }
 
-sql := fmt.Sprintf(
-"UPDATE {{$schemaTable}} SET %s WHERE ({{.LQ}}{{.Table.PKey.Columns | join (printf "%s,%s" .LQ .RQ)}}{{.RQ}}) IN (%s)",
+sql := fmt.Sprintf("UPDATE {{$schemaTable}} SET %s WHERE %s",
 strmangle.SetParamNames("{{.LQ}}", "{{.RQ}}", {{if .Dialect.IndexPlaceholders}}1{{else}}0{{end}}, colNames),
-strmangle.Placeholders(dialect.IndexPlaceholders, len(o) * len({{$varNameSingular}}PrimaryKeyColumns), len(colNames)+1, len({{$varNameSingular}}PrimaryKeyColumns)),
-)
+strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), {{if .Dialect.IndexPlaceholders}}len(colNames)+1{{else}}0{{end}}, {{$varNameSingular}}PrimaryKeyColumns, len(o)))
 
 if boil.DebugMode {
 fmt.Fprintln(boil.DebugWriter, sql)
@@ -166,7 +174,7 @@ func (o {{$tableNameSingular}}Slice) UpdateAll(exec boil.Executor, cols M) error
 
 _, err := exec.Exec(sql, args...)
 if err != nil {
-return errors.Wrap(err, "{{.PkgName}}: unable to update all in {{$varNameSingular}} slice")
+return errors.Prefix("{{.PkgName}}: unable to update all in {{$varNameSingular}} slice", err)
 }
 
 return nil

@@ -2,36 +2,36 @@
 {{- $varNameSingular := .Table.Name | singular | camelCase -}}
 {{- $schemaTable := .Table.Name | .SchemaTable}}
 // UpsertG attempts an insert, and does an update or ignore on conflict.
-func (o *{{$tableNameSingular}}) UpsertG({{if ne .DriverName "mysql"}}updateOnConflict bool, conflictColumns []string, {{end}}updateColumns []string, whitelist ...string) error {
-return o.Upsert(boil.GetDB(), {{if ne .DriverName "mysql"}}updateOnConflict, conflictColumns, {{end}}updateColumns, whitelist...)
+func (o *{{$tableNameSingular}}) UpsertG({{if eq .DriverName "postgres"}}updateOnConflict bool, conflictColumns []string, {{end}}updateColumns []string, whitelist ...string) error {
+return o.Upsert(boil.GetDB(), {{if eq .DriverName "postgres"}}updateOnConflict, conflictColumns, {{end}}updateColumns, whitelist...)
 }
 
 // UpsertGP attempts an insert, and does an update or ignore on conflict. Panics on error.
-func (o *{{$tableNameSingular}}) UpsertGP({{if ne .DriverName "mysql"}}updateOnConflict bool, conflictColumns []string, {{end}}updateColumns []string, whitelist ...string) {
-if err := o.Upsert(boil.GetDB(), {{if ne .DriverName "mysql"}}updateOnConflict, conflictColumns, {{end}}updateColumns, whitelist...); err != nil {
-panic(boil.WrapErr(err))
+func (o *{{$tableNameSingular}}) UpsertGP({{if eq .DriverName "postgres"}}updateOnConflict bool, conflictColumns []string, {{end}}updateColumns []string, whitelist ...string) {
+if err := o.Upsert(boil.GetDB(), {{if eq .DriverName "postgres"}}updateOnConflict, conflictColumns, {{end}}updateColumns, whitelist...); err != nil {
+panic(errors.Err(err))
 }
 }
 
 // UpsertP attempts an insert using an executor, and does an update or ignore on conflict.
 // UpsertP panics on error.
-func (o *{{$tableNameSingular}}) UpsertP(exec boil.Executor, {{if ne .DriverName "mysql"}}updateOnConflict bool, conflictColumns []string, {{end}}updateColumns []string, whitelist ...string) {
-if err := o.Upsert(exec, {{if ne .DriverName "mysql"}}updateOnConflict, conflictColumns, {{end}}updateColumns, whitelist...); err != nil {
-panic(boil.WrapErr(err))
+func (o *{{$tableNameSingular}}) UpsertP(exec boil.Executor, {{if eq .DriverName "postgres"}}updateOnConflict bool, conflictColumns []string, {{end}}updateColumns []string, whitelist ...string) {
+if err := o.Upsert(exec, {{if eq .DriverName "postgres"}}updateOnConflict, conflictColumns, {{end}}updateColumns, whitelist...); err != nil {
+panic(errors.Err(err))
 }
 }
 
 // Upsert attempts an insert using an executor, and does an update or ignore on conflict.
-func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if ne .DriverName "mysql"}}updateOnConflict bool, conflictColumns []string, {{end}}updateColumns []string, whitelist ...string) error {
+func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if eq .DriverName "postgres"}}updateOnConflict bool, conflictColumns []string, {{end}}updateColumns []string, whitelist ...string) error {
 if o == nil {
-return errors.New("{{.PkgName}}: no {{.Table.Name}} provided for upsert")
+return errors.Err("{{.PkgName}}: no {{.Table.Name}} provided for upsert")
 }
 
 {{- template "timestamp_upsert_helper" . }}
 
 {{if not .NoHooks -}}
 if err := o.doBeforeUpsertHooks(exec); err != nil {
-return err
+return errors.Err(err)
 }
 {{- end}}
 
@@ -39,7 +39,7 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if ne .DriverName
 
 // Build cache key in-line uglily - mysql vs postgres problems
 buf := strmangle.GetBuffer()
-{{if ne .DriverName "mysql" -}}
+{{if eq .DriverName "postgres"}}
 if updateOnConflict {
 buf.WriteByte('t')
 } else {
@@ -72,46 +72,69 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if ne .DriverName
 var err error
 
 if !cached {
-var ret []string
-whitelist, ret = strmangle.InsertColumnSet(
+insert, ret := strmangle.InsertColumnSet(
 {{$varNameSingular}}Columns,
 {{$varNameSingular}}ColumnsWithDefault,
 {{$varNameSingular}}ColumnsWithoutDefault,
 nzDefaults,
 whitelist,
 )
+{{if eq .DriverName "mssql" -}}
+insert = strmangle.SetComplement(insert, {{$varNameSingular}}ColumnsWithAuto)
+for i, v := range insert {
+if strmangle.ContainsAny({{$varNameSingular}}PrimaryKeyColumns, v) && strmangle.ContainsAny({{$varNameSingular}}ColumnsWithDefault, v) {
+insert = append(insert[:i], insert[i+1:]...)
+}
+}
+if len(insert) == 0 {
+return errors.Err("{{.PkgName}}: unable to upsert {{.Table.Name}}, could not build insert column list")
+}
+
+ret = strmangle.SetMerge(ret, {{$varNameSingular}}ColumnsWithAuto)
+ret = strmangle.SetMerge(ret, {{$varNameSingular}}ColumnsWithDefault)
+
+{{end}}
 update := strmangle.UpdateColumnSet(
 {{$varNameSingular}}Columns,
 {{$varNameSingular}}PrimaryKeyColumns,
 updateColumns,
 )
+{{if eq .DriverName "mssql" -}}
+update = strmangle.SetComplement(update, {{$varNameSingular}}ColumnsWithAuto)
+{{end -}}
 
 if len(update) == 0 {
-return errors.New("{{.PkgName}}: unable to upsert {{.Table.Name}}, could not build update column list")
+return errors.Err("{{.PkgName}}: unable to upsert {{.Table.Name}}, could not build update column list")
 }
 
-{{if ne .DriverName "mysql" -}}
+{{if eq .DriverName "postgres"}}
 conflict := conflictColumns
 if len(conflict) == 0 {
 conflict = make([]string, len({{$varNameSingular}}PrimaryKeyColumns))
 copy(conflict, {{$varNameSingular}}PrimaryKeyColumns)
 }
-cache.query = queries.BuildUpsertQueryPostgres(dialect, "{{$schemaTable}}", updateOnConflict, ret, update, conflict, whitelist)
+cache.query = queries.BuildUpsertQueryPostgres(dialect, "{{$schemaTable}}", updateOnConflict, ret, update, conflict, insert)
-{{- else -}}
+{{else if eq .DriverName "mysql"}}
-cache.query = queries.BuildUpsertQueryMySQL(dialect, "{{.Table.Name}}", update, whitelist)
+cache.query = queries.BuildUpsertQueryMySQL(dialect, "{{.Table.Name}}", update, insert, {{$varNameSingular}}AutoIncrementColumn)
 cache.retQuery = fmt.Sprintf(
 "SELECT %s FROM {{.LQ}}{{.Table.Name}}{{.RQ}} WHERE {{whereClause .LQ .RQ 0 .Table.PKey.Columns}}",
 strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, ret), ","),
|
||||||
)
|
)
|
||||||
|
{{else if eq .DriverName "mssql"}}
|
||||||
|
cache.query = queries.BuildUpsertQueryMSSQL(dialect, "{{.Table.Name}}", {{$varNameSingular}}PrimaryKeyColumns, update, insert, ret)
|
||||||
|
|
||||||
|
whitelist = append({{$varNameSingular}}PrimaryKeyColumns, update...)
|
||||||
|
whitelist = append(whitelist, insert...)
|
||||||
{{- end}}
|
{{- end}}
|
||||||
|
|
||||||
cache.valueMapping, err = queries.BindMapping({{$varNameSingular}}Type, {{$varNameSingular}}Mapping, whitelist)
|
cache.valueMapping, err = queries.BindMapping({{$varNameSingular}}Type, {{$varNameSingular}}Mapping, {{if eq .DriverName "mssql"}}whitelist{{else}}insert{{end}})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return errors.Err(err)
|
||||||
}
|
}
|
||||||
if len(ret) != 0 {
|
if len(ret) != 0 {
|
||||||
cache.retMapping, err = queries.BindMapping({{$varNameSingular}}Type, {{$varNameSingular}}Mapping, ret)
|
cache.retMapping, err = queries.BindMapping({{$varNameSingular}}Type, {{$varNameSingular}}Mapping, ret)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return errors.Err(err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -136,7 +159,7 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if ne .DriverName
|
||||||
_, err = exec.Exec(cache.query, vals...)
|
_, err = exec.Exec(cache.query, vals...)
|
||||||
{{- end}}
|
{{- end}}
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return errors.Wrap(err, "{{.PkgName}}: unable to upsert for {{.Table.Name}}")
|
return errors.Prefix("{{.PkgName}}: unable to upsert for {{.Table.Name}}", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
{{if $canLastInsertID -}}
|
{{if $canLastInsertID -}}
|
||||||
|
@ -151,7 +174,7 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if ne .DriverName
|
||||||
{{if $canLastInsertID -}}
|
{{if $canLastInsertID -}}
|
||||||
lastID, err = result.LastInsertId()
|
lastID, err = result.LastInsertId()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return ErrSyncFail
|
return errors.Err(ErrSyncFail)
|
||||||
}
|
}
|
||||||
|
|
||||||
{{$colName := index .Table.PKey.Columns 0 -}}
|
{{$colName := index .Table.PKey.Columns 0 -}}
|
||||||
|
@ -176,16 +199,19 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, {{if ne .DriverName
|
||||||
|
|
||||||
err = exec.QueryRow(cache.retQuery, identifierCols...).Scan(returns...)
|
err = exec.QueryRow(cache.retQuery, identifierCols...).Scan(returns...)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return errors.Wrap(err, "{{.PkgName}}: unable to populate default values for {{.Table.Name}}")
|
return errors.Prefix("{{.PkgName}}: unable to populate default values for {{.Table.Name}}", err)
|
||||||
}
|
}
|
||||||
{{- else}}
|
{{- else}}
|
||||||
if len(cache.retMapping) != 0 {
|
if len(cache.retMapping) != 0 {
|
||||||
err = exec.QueryRow(cache.query, vals...).Scan(returns...)
|
err = exec.QueryRow(cache.query, vals...).Scan(returns...)
|
||||||
|
if err == sql.ErrNoRows {
|
||||||
|
err = nil // Postgres doesn't return anything when there's no update
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
_, err = exec.Exec(cache.query, vals...)
|
_, err = exec.Exec(cache.query, vals...)
|
||||||
}
|
}
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return errors.Wrap(err, "{{.PkgName}}: unable to upsert for {{.Table.Name}}")
|
return errors.Prefix("{{.PkgName}}: unable to upsert {{.Table.Name}}", err)
|
||||||
}
|
}
|
||||||
{{- end}}
|
{{- end}}
|
||||||
|
|
||||||
|
|
|
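For orientation, a minimal sketch of calling the generated Upsert after this change. Post, db and the column names are hypothetical and not taken from this diff; on mysql and mssql the generated signature drops the updateOnConflict and conflictColumns arguments:

func upsertPost(db boil.Executor, post *Post) error {
	// postgres form: on conflict of the "id" column, update only "title"
	return post.Upsert(db, true, []string{"id"}, []string{"title"})
}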
@ -6,7 +6,7 @@
|
||||||
// Panics on error.
|
// Panics on error.
|
||||||
func (o *{{$tableNameSingular}}) DeleteP(exec boil.Executor) {
|
func (o *{{$tableNameSingular}}) DeleteP(exec boil.Executor) {
|
||||||
if err := o.Delete(exec); err != nil {
|
if err := o.Delete(exec); err != nil {
|
||||||
panic(boil.WrapErr(err))
|
panic(errors.Err(err))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -14,7 +14,7 @@ func (o *{{$tableNameSingular}}) DeleteP(exec boil.Executor) {
|
||||||
// DeleteG will match against the primary key column to find the record to delete.
|
// DeleteG will match against the primary key column to find the record to delete.
|
||||||
func (o *{{$tableNameSingular}}) DeleteG() error {
|
func (o *{{$tableNameSingular}}) DeleteG() error {
|
||||||
if o == nil {
|
if o == nil {
|
||||||
return errors.New("{{.PkgName}}: no {{$tableNameSingular}} provided for deletion")
|
return errors.Err("{{.PkgName}}: no {{$tableNameSingular}} provided for deletion")
|
||||||
}
|
}
|
||||||
|
|
||||||
return o.Delete(boil.GetDB())
|
return o.Delete(boil.GetDB())
|
||||||
|
@ -25,7 +25,7 @@ func (o *{{$tableNameSingular}}) DeleteG() error {
|
||||||
// Panics on error.
|
// Panics on error.
|
||||||
func (o *{{$tableNameSingular}}) DeleteGP() {
|
func (o *{{$tableNameSingular}}) DeleteGP() {
|
||||||
if err := o.DeleteG(); err != nil {
|
if err := o.DeleteG(); err != nil {
|
||||||
panic(boil.WrapErr(err))
|
panic(errors.Err(err))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -33,12 +33,12 @@ func (o *{{$tableNameSingular}}) DeleteGP() {
|
||||||
// Delete will match against the primary key column to find the record to delete.
|
// Delete will match against the primary key column to find the record to delete.
|
||||||
func (o *{{$tableNameSingular}}) Delete(exec boil.Executor) error {
|
func (o *{{$tableNameSingular}}) Delete(exec boil.Executor) error {
|
||||||
if o == nil {
|
if o == nil {
|
||||||
return errors.New("{{.PkgName}}: no {{$tableNameSingular}} provided for delete")
|
return errors.Err("{{.PkgName}}: no {{$tableNameSingular}} provided for delete")
|
||||||
}
|
}
|
||||||
|
|
||||||
{{if not .NoHooks -}}
|
{{if not .NoHooks -}}
|
||||||
if err := o.doBeforeDeleteHooks(exec); err != nil {
|
if err := o.doBeforeDeleteHooks(exec); err != nil {
|
||||||
return err
|
return errors.Err(err)
|
||||||
}
|
}
|
||||||
{{- end}}
|
{{- end}}
|
||||||
|
|
||||||
|
@ -52,12 +52,12 @@ func (o *{{$tableNameSingular}}) Delete(exec boil.Executor) error {
|
||||||
|
|
||||||
_, err := exec.Exec(sql, args...)
|
_, err := exec.Exec(sql, args...)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return errors.Wrap(err, "{{.PkgName}}: unable to delete from {{.Table.Name}}")
|
return errors.Prefix("{{.PkgName}}: unable to delete from {{.Table.Name}}", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
{{if not .NoHooks -}}
|
{{if not .NoHooks -}}
|
||||||
if err := o.doAfterDeleteHooks(exec); err != nil {
|
if err := o.doAfterDeleteHooks(exec); err != nil {
|
||||||
return err
|
return errors.Err(err)
|
||||||
}
|
}
|
||||||
{{- end}}
|
{{- end}}
|
||||||
|
|
||||||
|
@ -65,23 +65,23 @@ func (o *{{$tableNameSingular}}) Delete(exec boil.Executor) error {
|
||||||
}
|
}
|
||||||
|
|
||||||
// DeleteAllP deletes all rows, and panics on error.
|
// DeleteAllP deletes all rows, and panics on error.
|
||||||
func (q {{$varNameSingular}}Query) DeleteAllP() {
|
func (q {{$tableNameSingular}}Query) DeleteAllP() {
|
||||||
if err := q.DeleteAll(); err != nil {
|
if err := q.DeleteAll(); err != nil {
|
||||||
panic(boil.WrapErr(err))
|
panic(errors.Err(err))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// DeleteAll deletes all matching rows.
|
// DeleteAll deletes all matching rows.
|
||||||
func (q {{$varNameSingular}}Query) DeleteAll() error {
|
func (q {{$tableNameSingular}}Query) DeleteAll() error {
|
||||||
if q.Query == nil {
|
if q.Query == nil {
|
||||||
return errors.New("{{.PkgName}}: no {{$varNameSingular}}Query provided for delete all")
|
return errors.Err("{{.PkgName}}: no {{$tableNameSingular}}Query provided for delete all")
|
||||||
}
|
}
|
||||||
|
|
||||||
queries.SetDelete(q.Query)
|
queries.SetDelete(q.Query)
|
||||||
|
|
||||||
_, err := q.Query.Exec()
|
_, err := q.Query.Exec()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return errors.Wrap(err, "{{.PkgName}}: unable to delete all from {{.Table.Name}}")
|
return errors.Prefix("{{.PkgName}}: unable to delete all from {{.Table.Name}}", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
|
@ -90,14 +90,14 @@ func (q {{$varNameSingular}}Query) DeleteAll() error {
|
||||||
// DeleteAllGP deletes all rows in the slice, and panics on error.
|
// DeleteAllGP deletes all rows in the slice, and panics on error.
|
||||||
func (o {{$tableNameSingular}}Slice) DeleteAllGP() {
|
func (o {{$tableNameSingular}}Slice) DeleteAllGP() {
|
||||||
if err := o.DeleteAllG(); err != nil {
|
if err := o.DeleteAllG(); err != nil {
|
||||||
panic(boil.WrapErr(err))
|
panic(errors.Err(err))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// DeleteAllG deletes all rows in the slice.
|
// DeleteAllG deletes all rows in the slice.
|
||||||
func (o {{$tableNameSingular}}Slice) DeleteAllG() error {
|
func (o {{$tableNameSingular}}Slice) DeleteAllG() error {
|
||||||
if o == nil {
|
if o == nil {
|
||||||
return errors.New("{{.PkgName}}: no {{$tableNameSingular}} slice provided for delete all")
|
return errors.Err("{{.PkgName}}: no {{$tableNameSingular}} slice provided for delete all")
|
||||||
}
|
}
|
||||||
return o.DeleteAll(boil.GetDB())
|
return o.DeleteAll(boil.GetDB())
|
||||||
}
|
}
|
||||||
|
@ -105,14 +105,14 @@ func (o {{$tableNameSingular}}Slice) DeleteAllG() error {
|
||||||
// DeleteAllP deletes all rows in the slice, using an executor, and panics on error.
|
// DeleteAllP deletes all rows in the slice, using an executor, and panics on error.
|
||||||
func (o {{$tableNameSingular}}Slice) DeleteAllP(exec boil.Executor) {
|
func (o {{$tableNameSingular}}Slice) DeleteAllP(exec boil.Executor) {
|
||||||
if err := o.DeleteAll(exec); err != nil {
|
if err := o.DeleteAll(exec); err != nil {
|
||||||
panic(boil.WrapErr(err))
|
panic(errors.Err(err))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// DeleteAll deletes all rows in the slice, using an executor.
|
// DeleteAll deletes all rows in the slice, using an executor.
|
||||||
func (o {{$tableNameSingular}}Slice) DeleteAll(exec boil.Executor) error {
|
func (o {{$tableNameSingular}}Slice) DeleteAll(exec boil.Executor) error {
|
||||||
if o == nil {
|
if o == nil {
|
||||||
return errors.New("{{.PkgName}}: no {{$tableNameSingular}} slice provided for delete all")
|
return errors.Err("{{.PkgName}}: no {{$tableNameSingular}} slice provided for delete all")
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(o) == 0 {
|
if len(o) == 0 {
|
||||||
|
@ -123,7 +123,7 @@ func (o {{$tableNameSingular}}Slice) DeleteAll(exec boil.Executor) error {
|
||||||
if len({{$varNameSingular}}BeforeDeleteHooks) != 0 {
|
if len({{$varNameSingular}}BeforeDeleteHooks) != 0 {
|
||||||
for _, obj := range o {
|
for _, obj := range o {
|
||||||
if err := obj.doBeforeDeleteHooks(exec); err != nil {
|
if err := obj.doBeforeDeleteHooks(exec); err != nil {
|
||||||
return err
|
return errors.Err(err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -135,11 +135,8 @@ func (o {{$tableNameSingular}}Slice) DeleteAll(exec boil.Executor) error {
|
||||||
args = append(args, pkeyArgs...)
|
args = append(args, pkeyArgs...)
|
||||||
}
|
}
|
||||||
|
|
||||||
sql := fmt.Sprintf(
|
sql := "DELETE FROM {{$schemaTable}} WHERE " +
|
||||||
"DELETE FROM {{$schemaTable}} WHERE (%s) IN (%s)",
|
strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), {{if .Dialect.IndexPlaceholders}}1{{else}}0{{end}}, {{$varNameSingular}}PrimaryKeyColumns, len(o))
|
||||||
strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, {{$varNameSingular}}PrimaryKeyColumns), ","),
|
|
||||||
strmangle.Placeholders(dialect.IndexPlaceholders, len(o) * len({{$varNameSingular}}PrimaryKeyColumns), 1, len({{$varNameSingular}}PrimaryKeyColumns)),
|
|
||||||
)
|
|
||||||
|
|
||||||
if boil.DebugMode {
|
if boil.DebugMode {
|
||||||
fmt.Fprintln(boil.DebugWriter, sql)
|
fmt.Fprintln(boil.DebugWriter, sql)
|
||||||
|
@ -148,14 +145,14 @@ func (o {{$tableNameSingular}}Slice) DeleteAll(exec boil.Executor) error {
|
||||||
|
|
||||||
_, err := exec.Exec(sql, args...)
|
_, err := exec.Exec(sql, args...)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return errors.Wrap(err, "{{.PkgName}}: unable to delete all from {{$varNameSingular}} slice")
|
return errors.Prefix("{{.PkgName}}: unable to delete all from {{$varNameSingular}} slice", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
{{if not .NoHooks -}}
|
{{if not .NoHooks -}}
|
||||||
if len({{$varNameSingular}}AfterDeleteHooks) != 0 {
|
if len({{$varNameSingular}}AfterDeleteHooks) != 0 {
|
||||||
for _, obj := range o {
|
for _, obj := range o {
|
||||||
if err := obj.doAfterDeleteHooks(exec); err != nil {
|
if err := obj.doAfterDeleteHooks(exec); err != nil {
|
||||||
return err
|
return errors.Err(err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -5,21 +5,21 @@
|
||||||
// ReloadGP refetches the object from the database and panics on error.
|
// ReloadGP refetches the object from the database and panics on error.
|
||||||
func (o *{{$tableNameSingular}}) ReloadGP() {
|
func (o *{{$tableNameSingular}}) ReloadGP() {
|
||||||
if err := o.ReloadG(); err != nil {
|
if err := o.ReloadG(); err != nil {
|
||||||
panic(boil.WrapErr(err))
|
panic(errors.Err(err))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// ReloadP refetches the object from the database with an executor. Panics on error.
|
// ReloadP refetches the object from the database with an executor. Panics on error.
|
||||||
func (o *{{$tableNameSingular}}) ReloadP(exec boil.Executor) {
|
func (o *{{$tableNameSingular}}) ReloadP(exec boil.Executor) {
|
||||||
if err := o.Reload(exec); err != nil {
|
if err := o.Reload(exec); err != nil {
|
||||||
panic(boil.WrapErr(err))
|
panic(errors.Err(err))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// ReloadG refetches the object from the database using the primary keys.
|
// ReloadG refetches the object from the database using the primary keys.
|
||||||
func (o *{{$tableNameSingular}}) ReloadG() error {
|
func (o *{{$tableNameSingular}}) ReloadG() error {
|
||||||
if o == nil {
|
if o == nil {
|
||||||
return errors.New("{{.PkgName}}: no {{$tableNameSingular}} provided for reload")
|
return errors.Err("{{.PkgName}}: no {{$tableNameSingular}} provided for reload")
|
||||||
}
|
}
|
||||||
|
|
||||||
return o.Reload(boil.GetDB())
|
return o.Reload(boil.GetDB())
|
||||||
|
@ -30,7 +30,7 @@ func (o *{{$tableNameSingular}}) ReloadG() error {
|
||||||
func (o *{{$tableNameSingular}}) Reload(exec boil.Executor) error {
|
func (o *{{$tableNameSingular}}) Reload(exec boil.Executor) error {
|
||||||
ret, err := Find{{$tableNameSingular}}(exec, {{.Table.PKey.Columns | stringMap .StringFuncs.titleCase | prefixStringSlice "o." | join ", "}})
|
ret, err := Find{{$tableNameSingular}}(exec, {{.Table.PKey.Columns | stringMap .StringFuncs.titleCase | prefixStringSlice "o." | join ", "}})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return errors.Err(err)
|
||||||
}
|
}
|
||||||
|
|
||||||
*o = *ret
|
*o = *ret
|
||||||
|
@ -42,7 +42,7 @@ func (o *{{$tableNameSingular}}) Reload(exec boil.Executor) error {
|
||||||
// Panics on error.
|
// Panics on error.
|
||||||
func (o *{{$tableNameSingular}}Slice) ReloadAllGP() {
|
func (o *{{$tableNameSingular}}Slice) ReloadAllGP() {
|
||||||
if err := o.ReloadAllG(); err != nil {
|
if err := o.ReloadAllG(); err != nil {
|
||||||
panic(boil.WrapErr(err))
|
panic(errors.Err(err))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -51,7 +51,7 @@ func (o *{{$tableNameSingular}}Slice) ReloadAllGP() {
|
||||||
// Panics on error.
|
// Panics on error.
|
||||||
func (o *{{$tableNameSingular}}Slice) ReloadAllP(exec boil.Executor) {
|
func (o *{{$tableNameSingular}}Slice) ReloadAllP(exec boil.Executor) {
|
||||||
if err := o.ReloadAll(exec); err != nil {
|
if err := o.ReloadAll(exec); err != nil {
|
||||||
panic(boil.WrapErr(err))
|
panic(errors.Err(err))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -59,7 +59,7 @@ func (o *{{$tableNameSingular}}Slice) ReloadAllP(exec boil.Executor) {
|
||||||
// and overwrites the original object slice with the newly updated slice.
|
// and overwrites the original object slice with the newly updated slice.
|
||||||
func (o *{{$tableNameSingular}}Slice) ReloadAllG() error {
|
func (o *{{$tableNameSingular}}Slice) ReloadAllG() error {
|
||||||
if o == nil {
|
if o == nil {
|
||||||
return errors.New("{{.PkgName}}: empty {{$tableNameSingular}}Slice provided for reload all")
|
return errors.Err("{{.PkgName}}: empty {{$tableNameSingular}}Slice provided for reload all")
|
||||||
}
|
}
|
||||||
|
|
||||||
return o.ReloadAll(boil.GetDB())
|
return o.ReloadAll(boil.GetDB())
|
||||||
|
@ -79,17 +79,14 @@ func (o *{{$tableNameSingular}}Slice) ReloadAll(exec boil.Executor) error {
|
||||||
args = append(args, pkeyArgs...)
|
args = append(args, pkeyArgs...)
|
||||||
}
|
}
|
||||||
|
|
||||||
sql := fmt.Sprintf(
|
sql := "SELECT {{$schemaTable}}.* FROM {{$schemaTable}} WHERE " +
|
||||||
"SELECT {{$schemaTable}}.* FROM {{$schemaTable}} WHERE (%s) IN (%s)",
|
strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), {{if .Dialect.IndexPlaceholders}}1{{else}}0{{end}}, {{$varNameSingular}}PrimaryKeyColumns, len(*o))
|
||||||
strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, {{$varNameSingular}}PrimaryKeyColumns), ","),
|
|
||||||
strmangle.Placeholders(dialect.IndexPlaceholders, len(*o) * len({{$varNameSingular}}PrimaryKeyColumns), 1, len({{$varNameSingular}}PrimaryKeyColumns)),
|
|
||||||
)
|
|
||||||
|
|
||||||
q := queries.Raw(exec, sql, args...)
|
q := queries.Raw(exec, sql, args...)
|
||||||
|
|
||||||
err := q.Bind(&{{$varNamePlural}})
|
err := q.Bind(&{{$varNamePlural}})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return errors.Wrap(err, "{{.PkgName}}: unable to reload all in {{$tableNameSingular}}Slice")
|
return errors.Prefix("{{.PkgName}}: unable to reload all in {{$tableNameSingular}}Slice", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
*o = {{$varNamePlural}}
|
*o = {{$varNamePlural}}
|
||||||
|
|
|
@ -1,13 +1,17 @@
|
||||||
{{- $tableNameSingular := .Table.Name | singular | titleCase -}}
|
{{- $tableNameSingular := .Table.Name | singular | titleCase -}}
|
||||||
|
{{- $varNameSingular := .Table.Name | singular | camelCase -}}
|
||||||
{{- $colDefs := sqlColDefinitions .Table.Columns .Table.PKey.Columns -}}
|
{{- $colDefs := sqlColDefinitions .Table.Columns .Table.PKey.Columns -}}
|
||||||
{{- $pkNames := $colDefs.Names | stringMap .StringFuncs.camelCase -}}
|
{{- $pkNames := $colDefs.Names | stringMap .StringFuncs.camelCase | stringMap .StringFuncs.replaceReserved -}}
|
||||||
{{- $pkArgs := joinSlices " " $pkNames $colDefs.Types | join ", " -}}
|
{{- $pkArgs := joinSlices " " $pkNames $colDefs.Types | join ", " -}}
|
||||||
{{- $schemaTable := .Table.Name | .SchemaTable}}
|
{{- $schemaTable := .Table.Name | .SchemaTable}}
|
||||||
// {{$tableNameSingular}}Exists checks if the {{$tableNameSingular}} row exists.
|
// {{$tableNameSingular}}Exists checks if the {{$tableNameSingular}} row exists.
|
||||||
func {{$tableNameSingular}}Exists(exec boil.Executor, {{$pkArgs}}) (bool, error) {
|
func {{$tableNameSingular}}Exists(exec boil.Executor, {{$pkArgs}}) (bool, error) {
|
||||||
var exists bool
|
var exists bool
|
||||||
|
{{if eq .DriverName "mssql" -}}
|
||||||
|
sql := "select case when exists(select top(1) 1 from {{$schemaTable}} where {{if .Dialect.IndexPlaceholders}}{{whereClause .LQ .RQ 1 .Table.PKey.Columns}}{{else}}{{whereClause .LQ .RQ 0 .Table.PKey.Columns}}{{end}}) then 1 else 0 end"
|
||||||
|
{{- else -}}
|
||||||
sql := "select exists(select 1 from {{$schemaTable}} where {{if .Dialect.IndexPlaceholders}}{{whereClause .LQ .RQ 1 .Table.PKey.Columns}}{{else}}{{whereClause .LQ .RQ 0 .Table.PKey.Columns}}{{end}} limit 1)"
|
sql := "select exists(select 1 from {{$schemaTable}} where {{if .Dialect.IndexPlaceholders}}{{whereClause .LQ .RQ 1 .Table.PKey.Columns}}{{else}}{{whereClause .LQ .RQ 0 .Table.PKey.Columns}}{{end}} limit 1)"
|
||||||
|
{{- end}}
|
||||||
|
|
||||||
if boil.DebugMode {
|
if boil.DebugMode {
|
||||||
fmt.Fprintln(boil.DebugWriter, sql)
|
fmt.Fprintln(boil.DebugWriter, sql)
|
||||||
|
@ -18,7 +22,7 @@ func {{$tableNameSingular}}Exists(exec boil.Executor, {{$pkArgs}}) (bool, error)
|
||||||
|
|
||||||
err := row.Scan(&exists)
|
err := row.Scan(&exists)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return false, errors.Wrap(err, "{{.PkgName}}: unable to check if {{.Table.Name}} exists")
|
return false, errors.Prefix("{{.PkgName}}: unable to check if {{.Table.Name}} exists", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return exists, nil
|
return exists, nil
|
||||||
|
@ -33,7 +37,7 @@ func {{$tableNameSingular}}ExistsG({{$pkArgs}}) (bool, error) {
|
||||||
func {{$tableNameSingular}}ExistsGP({{$pkArgs}}) bool {
|
func {{$tableNameSingular}}ExistsGP({{$pkArgs}}) bool {
|
||||||
e, err := {{$tableNameSingular}}Exists(boil.GetDB(), {{$pkNames | join ", "}})
|
e, err := {{$tableNameSingular}}Exists(boil.GetDB(), {{$pkNames | join ", "}})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(boil.WrapErr(err))
|
panic(errors.Err(err))
|
||||||
}
|
}
|
||||||
|
|
||||||
return e
|
return e
|
||||||
|
@ -43,8 +47,43 @@ func {{$tableNameSingular}}ExistsGP({{$pkArgs}}) bool {
|
||||||
func {{$tableNameSingular}}ExistsP(exec boil.Executor, {{$pkArgs}}) bool {
|
func {{$tableNameSingular}}ExistsP(exec boil.Executor, {{$pkArgs}}) bool {
|
||||||
e, err := {{$tableNameSingular}}Exists(exec, {{$pkNames | join ", "}})
|
e, err := {{$tableNameSingular}}Exists(exec, {{$pkNames | join ", "}})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
panic(boil.WrapErr(err))
|
panic(errors.Err(err))
|
||||||
}
|
}
|
||||||
|
|
||||||
return e
|
return e
|
||||||
}
|
}
|
||||||
|
|
||||||
|
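A similar sketch for the generated exists helper; Post and the uint64 key are hypothetical:

func postExists(id uint64) (bool, error) {
	// runs the generated "select exists(...)" query against the global executor
	return PostExistsG(id)
}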
// IsNew returns true if the record has not been stored in the db yet (i.e. all of its primary key fields still hold their zero values).
func (o *{{$tableNameSingular}}) IsNew() bool {
	r := reflect.ValueOf(o).Elem()
	for i := 0; i < r.NumField(); i++ {
		column := r.Type().Field(i).Tag.Get("boil")
		for _, pkColumn := range {{$varNameSingular}}PrimaryKeyColumns {
			if column == pkColumn {
				field := r.Field(i)
				if field.Interface() != reflect.Zero(field.Type()).Interface() {
					return false
				}
			}
		}
	}
	return true
}

// Save inserts the record if it does not exist, or updates it if it does.
func (o *{{$tableNameSingular}}) Save(exec boil.Executor, whitelist ...string) error {
	if o.IsNew() {
		return o.Insert(exec, whitelist...)
	} else {
		return o.Update(exec, whitelist...)
	}
}

// SaveG inserts the record if it does not exist, or updates it if it does, using the global executor.
func (o *{{$tableNameSingular}}) SaveG(whitelist ...string) error {
	if o.IsNew() {
		return o.InsertG(whitelist...)
	} else {
		return o.UpdateG(whitelist...)
	}
}
|
||||||
|
|
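A sketch of the new IsNew/Save helpers in use, assuming a hypothetical generated Post model whose auto-increment primary key is ID and whose Title field is a plain string:

func createThenRename(post *Post) error {
	// ID is still the zero value, so IsNew() is true and SaveG inserts
	if err := post.SaveG(); err != nil {
		return err
	}
	// the primary key is now set, so the second SaveG updates instead
	post.Title = "renamed"
	return post.SaveG()
}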
templates/22_query.tpl (new file, +33 lines)
@@ -0,0 +1,33 @@
{{- $tableNameSingular := .Table.Name | singular | titleCase -}}

// {{$tableNameSingular}}NewQuery starts a query against the {{.Table.Name}} table
func {{$tableNameSingular}}NewQuery(exec boil.Executor) *{{$tableNameSingular}}Query {
	return &{{$tableNameSingular}}Query{NewQuery(exec, qm.Select("*"), qm.From("{{.Table.Name | .SchemaTable}}"))}
}

// {{$tableNameSingular}}NewQueryG starts a query against the {{.Table.Name}} table using the global executor
func {{$tableNameSingular}}NewQueryG() *{{$tableNameSingular}}Query {
	return {{$tableNameSingular}}NewQuery(boil.GetDB())
}

// Where filters query results
func (q *{{$tableNameSingular}}Query) Where(filters {{$tableNameSingular}}Filter) *{{$tableNameSingular}}Query {
	r := reflect.ValueOf(filters)
	for i := 0; i < r.NumField(); i++ {
		f := r.Field(i)
		if f.Elem().IsValid() {
			if nullable, ok := f.Elem().Interface().(null.Nullable); ok && nullable.IsNull() {
				queries.AppendWhere(q.Query, r.Type().Field(i).Tag.Get("boil")+" IS NULL")
			} else {
				queries.AppendWhere(q.Query, r.Type().Field(i).Tag.Get("boil")+" = ?", f.Elem().Interface())
			}
		}
	}
	return q
}

// Limit limits query results
func (q *{{$tableNameSingular}}Query) Limit(limit int) *{{$tableNameSingular}}Query {
	queries.SetLimit(q.Query, limit)
	return q
}
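A usage sketch for the query helpers above, assuming a hypothetical generated Post model, a PostFilter struct of nullable fields, and the usual generated All finisher:

func postsByAuthor(author string) (PostSlice, error) {
	q := PostNewQueryG().
		Where(PostFilter{Author: null.StringFrom(author)}). // author = ?
		Limit(10)
	return q.All()
}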
templates/23_merge.tpl (new file, +107 lines)
@@ -0,0 +1,107 @@
{{- $tableNamePlural := .Table.Name | plural | titleCase -}}
{{- $tableNameSingular := .Table.Name | singular | titleCase -}}
{{- if .Table.IsJoinTable -}}
{{- else -}}
{{- $dot := . }}
// Merge{{$tableNamePlural}} combines two {{$tableNamePlural}} into one. The primary record will be kept, and the secondary will be deleted.
func Merge{{$tableNamePlural}}(exec boil.Executor, primaryID uint64, secondaryID uint64) (err error) {
	tx, ok := exec.(boil.Transactor)
	if !ok {
		txdb, ok := exec.(boil.Beginner)
		if !ok {
			return errors.Err("database does not support transactions")
		}

		tx, err = txdb.Begin()
		if err != nil {
			return errors.Err(err)
		}

		defer func() {
			if p := recover(); p != nil {
				tx.Rollback()
				panic(p) // Rollback, then propagate panic
			} else if err != nil {
				tx.Rollback()
			} else {
				err = tx.Commit()
			}
		}()
	}

	primary, err := Find{{$tableNameSingular}}(tx, primaryID)
	if err != nil {
		return errors.Err(err)
	} else if primary == nil {
		return errors.Err("primary {{$tableNameSingular}} not found")
	}

	secondary, err := Find{{$tableNameSingular}}(tx, secondaryID)
	if err != nil {
		return errors.Err(err)
	} else if secondary == nil {
		return errors.Err("secondary {{$tableNameSingular}} not found")
	}

	foreignKeys := []foreignKey{
		{{- range .Tables -}}
		{{- range .FKeys -}}
		{{- if eq $dot.Table.Name .ForeignTable }}
		{foreignTable: "{{.Table}}", foreignColumn: "{{.Column}}"},
		{{- end -}}
		{{- end -}}
		{{- end }}
	}

	conflictingKeys := []conflictingUniqueKey{
		{{- range .Tables -}}
		{{- $table := . -}}
		{{- range .FKeys -}}
		{{- $fk := . -}}
		{{- if eq $dot.Table.Name .ForeignTable -}}
		{{- range $table.UKeys -}}
		{{- if setInclude $fk.Column .Columns }}
		{table: "{{$fk.Table}}", objectIdColumn: "{{$fk.Column}}", columns: []string{`{{ .Columns | join "`,`" }}`}},
		{{- end -}}
		{{- end -}}
		{{- end -}}
		{{- end -}}
		{{- end }}
	}

	err = mergeModels(tx, primaryID, secondaryID, foreignKeys, conflictingKeys)
	if err != nil {
		return err
	}

	pr := reflect.ValueOf(primary)
	sr := reflect.ValueOf(secondary)
	// For any column that's null on the primary and not null on the secondary, copy from secondary to primary.
	for i := 0; i < sr.Elem().NumField(); i++ {
		pf := pr.Elem().Field(i)
		sf := sr.Elem().Field(i)
		if sf.IsValid() {
			if nullable, ok := sf.Interface().(null.Nullable); ok && !nullable.IsNull() && pf.Interface().(null.Nullable).IsNull() {
				pf.Set(sf)
			}
		}
	}

	err = primary.Update(tx)
	if err != nil {
		return err
	}

	err = secondary.Delete(tx)
	if err != nil {
		return err
	}

	return nil
}

// Merge{{$tableNamePlural}}G combines two {{$tableNamePlural}} into one using the global executor. The primary record will be kept, and the secondary will be deleted.
func Merge{{$tableNamePlural}}G(primaryID uint64, secondaryID uint64) error {
	return Merge{{$tableNamePlural}}(boil.GetDB(), primaryID, secondaryID)
}
{{- end -}}{{/* join table */}}
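And a sketch of the generated merge entry point, assuming a users table (so the generator emits MergeUsersG) keyed by uint64 IDs:

func dedupeUsers(keepID, dupeID uint64) error {
	// re-points foreign keys from dupeID to keepID, copies columns that are
	// null on the kept row, then deletes the duplicate, all in one transaction
	return MergeUsersG(keepID, dupeID)
}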
|
|
@ -2,6 +2,7 @@ var dialect = queries.Dialect{
|
||||||
LQ: 0x{{printf "%x" .Dialect.LQ}},
|
LQ: 0x{{printf "%x" .Dialect.LQ}},
|
||||||
RQ: 0x{{printf "%x" .Dialect.RQ}},
|
RQ: 0x{{printf "%x" .Dialect.RQ}},
|
||||||
IndexPlaceholders: {{.Dialect.IndexPlaceholders}},
|
IndexPlaceholders: {{.Dialect.IndexPlaceholders}},
|
||||||
|
UseTopClause: {{.Dialect.UseTopClause}},
|
||||||
}
|
}
|
||||||
|
|
||||||
// NewQueryG initializes a new Query using the passed in QueryMods
|
// NewQueryG initializes a new Query using the passed in QueryMods
|
||||||
|
@ -18,3 +19,168 @@ func NewQuery(exec boil.Executor, mods ...qm.QueryMod) *queries.Query {
|
||||||
|
|
||||||
return q
|
return q
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func mergeModels(tx boil.Executor, primaryID uint64, secondaryID uint64, foreignKeys []foreignKey, conflictingKeys []conflictingUniqueKey) error {
|
||||||
|
if len(foreignKeys) < 1 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
var err error
|
||||||
|
|
||||||
|
for _, conflict := range conflictingKeys {
|
||||||
|
if len(conflict.columns) == 1 && conflict.columns[0] == conflict.objectIdColumn {
|
||||||
|
err = deleteOneToOneConflictsBeforeMerge(tx, conflict, primaryID, secondaryID)
|
||||||
|
} else {
|
||||||
|
err = deleteOneToManyConflictsBeforeMerge(tx, conflict, primaryID, secondaryID)
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, fk := range foreignKeys {
|
||||||
|
// TODO: use NewQuery here, not plain sql
|
||||||
|
query := fmt.Sprintf(
|
||||||
|
"UPDATE %s SET %s = %s WHERE %s = %s",
|
||||||
|
fk.foreignTable, fk.foreignColumn, strmangle.Placeholders(dialect.IndexPlaceholders, 1, 1, 1),
|
||||||
|
fk.foreignColumn, strmangle.Placeholders(dialect.IndexPlaceholders, 1, 2, 1),
|
||||||
|
)
|
||||||
|
_, err = tx.Exec(query, primaryID, secondaryID)
|
||||||
|
if err != nil {
|
||||||
|
return errors.Err(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return checkMerge(tx, foreignKeys)
|
||||||
|
}
|
||||||
|
|
||||||
|
func deleteOneToOneConflictsBeforeMerge(tx boil.Executor, conflict conflictingUniqueKey, primaryID uint64, secondaryID uint64) error {
|
||||||
|
query := fmt.Sprintf(
|
||||||
|
"SELECT COUNT(*) FROM %s WHERE %s IN (%s)",
|
||||||
|
conflict.table, conflict.objectIdColumn,
|
||||||
|
strmangle.Placeholders(dialect.IndexPlaceholders, 2, 1, 1),
|
||||||
|
)
|
||||||
|
|
||||||
|
var count int
|
||||||
|
err := tx.QueryRow(query, primaryID, secondaryID).Scan(&count)
|
||||||
|
if err != nil {
|
||||||
|
return errors.Err(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if count > 2 {
|
||||||
|
return errors.Err("it should not be possible to have more than two rows here")
|
||||||
|
} else if count != 2 {
|
||||||
|
return nil // no conflicting rows
|
||||||
|
}
|
||||||
|
|
||||||
|
query = fmt.Sprintf(
|
||||||
|
"DELETE FROM %s WHERE %s = %s",
|
||||||
|
conflict.table, conflict.objectIdColumn, strmangle.Placeholders(dialect.IndexPlaceholders, 1, 1, 1),
|
||||||
|
)
|
||||||
|
|
||||||
|
_, err = tx.Exec(query, secondaryID)
|
||||||
|
return errors.Err(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func deleteOneToManyConflictsBeforeMerge(tx boil.Executor, conflict conflictingUniqueKey, primaryID uint64, secondaryID uint64) error {
|
||||||
|
conflictingColumns := strmangle.SetComplement(conflict.columns, []string{conflict.objectIdColumn})
|
||||||
|
|
||||||
|
query := fmt.Sprintf(
|
||||||
|
"SELECT %s FROM %s WHERE %s IN (%s) GROUP BY %s HAVING count(distinct %s) > 1",
|
||||||
|
strings.Join(conflictingColumns, ","), conflict.table, conflict.objectIdColumn,
|
||||||
|
strmangle.Placeholders(dialect.IndexPlaceholders, 2, 1, 1),
|
||||||
|
strings.Join(conflictingColumns, ","), conflict.objectIdColumn,
|
||||||
|
)
|
||||||
|
|
||||||
|
//The selectParams should be the ObjectIDs to search for regarding the conflict.
|
||||||
|
rows, err := tx.Query(query, primaryID, secondaryID)
|
||||||
|
if err != nil {
|
||||||
|
return errors.Err(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
//Since we don't don't know if advance how many columns the query returns, we have dynamically assign them to be
|
||||||
|
// used in the delete query.
|
||||||
|
colNames, err := rows.Columns()
|
||||||
|
if err != nil {
|
||||||
|
return errors.Err(err)
|
||||||
|
}
|
||||||
|
//Each row result of the query needs to be removed for being a conflicting row. Store each row's keys in an array.
|
||||||
|
var rowsToRemove = [][]interface{}(nil)
|
||||||
|
for rows.Next() {
|
||||||
|
//Set pointers for dynamic scan
|
||||||
|
iColPtrs := make([]interface{}, len(colNames))
|
||||||
|
for i := 0; i < len(colNames); i++ {
|
||||||
|
s := string("")
|
||||||
|
iColPtrs[i] = &s
|
||||||
|
}
|
||||||
|
//Dynamically scan n columns
|
||||||
|
err = rows.Scan(iColPtrs...)
|
||||||
|
if err != nil {
|
||||||
|
return errors.Err(err)
|
||||||
|
}
|
||||||
|
//Grab scanned values for query arguments
|
||||||
|
iCol := make([]interface{}, len(colNames))
|
||||||
|
for i, col := range iColPtrs {
|
||||||
|
x := col.(*string)
|
||||||
|
iCol[i] = *x
|
||||||
|
}
|
||||||
|
rowsToRemove = append(rowsToRemove, iCol)
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
//This query will adjust dynamically depending on the number of conflicting keys, adding AND expressions for each
|
||||||
|
// key to ensure the right conflicting rows are deleted.
|
||||||
|
query = fmt.Sprintf(
|
||||||
|
"DELETE FROM %s %s",
|
||||||
|
conflict.table,
|
||||||
|
"WHERE "+strings.Join(conflict.columns, " = ? AND ")+" = ?",
|
||||||
|
)
|
||||||
|
|
||||||
|
//There could be multiple conflicting rows between ObjectIDs. In the SELECT query we grab each row and their column
|
||||||
|
// keys to be deleted here in a loop.
|
||||||
|
for _, rowToDelete := range rowsToRemove {
|
||||||
|
rowToDelete = append(rowToDelete, secondaryID)
|
||||||
|
_, err = tx.Exec(query, rowToDelete...)
|
||||||
|
if err != nil {
|
||||||
|
return errors.Err(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func checkMerge(tx boil.Executor, foreignKeys []foreignKey) error {
|
||||||
|
uniqueColumns := []interface{}{}
|
||||||
|
uniqueColumnNames := map[string]bool{}
|
||||||
|
handledTablesColumns := map[string]bool{}
|
||||||
|
|
||||||
|
for _, fk := range foreignKeys {
|
||||||
|
handledTablesColumns[fk.foreignTable+"."+fk.foreignColumn] = true
|
||||||
|
if _, ok := uniqueColumnNames[fk.foreignColumn]; !ok {
|
||||||
|
uniqueColumns = append(uniqueColumns, fk.foreignColumn)
|
||||||
|
uniqueColumnNames[fk.foreignColumn] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
q := fmt.Sprintf(
|
||||||
|
`SELECT table_name, column_name FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA=DATABASE() AND column_name IN (%s)`,
|
||||||
|
strmangle.Placeholders(dialect.IndexPlaceholders, len(uniqueColumns), 1, 1),
|
||||||
|
)
|
||||||
|
rows, err := tx.Query(q, uniqueColumns...)
|
||||||
|
defer rows.Close()
|
||||||
|
if err != nil {
|
||||||
|
return errors.Err(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
for rows.Next() {
|
||||||
|
var tableName string
|
||||||
|
var columnName string
|
||||||
|
err = rows.Scan(&tableName, &columnName)
|
||||||
|
if err != nil {
|
||||||
|
return errors.Err(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, exists := handledTablesColumns[tableName+"."+columnName]; !exists {
|
||||||
|
return errors.Err("missing merge for " + tableName + "." + columnName)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
templates/singleton/boil_table_names.tpl (new file, +9 lines)
@@ -0,0 +1,9 @@
|
||||||
|
var TableNames = struct {
|
||||||
|
{{range $table := .Tables -}}
|
||||||
|
{{titleCase $table.Name}} string
|
||||||
|
{{end -}}
|
||||||
|
}{
|
||||||
|
{{range $table := .Tables -}}
|
||||||
|
{{titleCase $table.Name}}: "{{$table.Name}}",
|
||||||
|
{{end -}}
|
||||||
|
}
|
|
@ -1,10 +1,26 @@
|
||||||
// M type is for providing columns and column values to UpdateAll.
|
// M type is for providing columns and column values to UpdateAll.
|
||||||
type M map[string]interface{}
|
type M map[string]interface{}
|
||||||
|
|
||||||
|
// foreignKey connects two tables. When merging records, foreign keys from secondary record must
|
||||||
|
// be reassigned to primary record.
|
||||||
|
type foreignKey struct {
|
||||||
|
foreignTable string
|
||||||
|
foreignColumn string
|
||||||
|
}
|
||||||
|
|
||||||
|
// conflictingUniqueKey records a merge conflict. If two rows exist with the same value in the
|
||||||
|
// conflicting column for two records being merged, one row must be deleted.
|
||||||
|
type conflictingUniqueKey struct {
|
||||||
|
table string
|
||||||
|
objectIdColumn string
|
||||||
|
columns []string
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
// ErrSyncFail occurs during insert when the record could not be retrieved in
|
// ErrSyncFail occurs during insert when the record could not be retrieved in
|
||||||
// order to populate default value information. This usually happens when LastInsertId
|
// order to populate default value information. This usually happens when LastInsertId
|
||||||
// fails or there was a primary key configuration that was not resolvable.
|
// fails or there was a primary key configuration that was not resolvable.
|
||||||
var ErrSyncFail = errors.New("{{.PkgName}}: failed to synchronize data after insert")
|
var ErrSyncFail = errors.Base("{{.PkgName}}: failed to synchronize data after insert")
|
||||||
|
|
||||||
type insertCache struct {
|
type insertCache struct {
|
||||||
query string
|
query string
|
||||||
|
|
|
@ -8,7 +8,7 @@ func test{{$tableNamePlural}}Delete(t *testing.T) {
|
||||||
seed := randomize.NewSeed()
|
seed := randomize.NewSeed()
|
||||||
var err error
|
var err error
|
||||||
{{$varNameSingular}} := &{{$tableNameSingular}}{}
|
{{$varNameSingular}} := &{{$tableNameSingular}}{}
|
||||||
if err = randomize.Struct(seed, {{$varNameSingular}}, {{$varNameSingular}}DBTypes, true); err != nil {
|
if err = randomize.Struct(seed, {{$varNameSingular}}, {{$varNameSingular}}DBTypes, true, {{$varNameSingular}}ColumnsWithDefault...); err != nil {
|
||||||
t.Errorf("Unable to randomize {{$tableNameSingular}} struct: %s", err)
|
t.Errorf("Unable to randomize {{$tableNameSingular}} struct: %s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -38,7 +38,7 @@ func test{{$tableNamePlural}}QueryDeleteAll(t *testing.T) {
|
||||||
seed := randomize.NewSeed()
|
seed := randomize.NewSeed()
|
||||||
var err error
|
var err error
|
||||||
{{$varNameSingular}} := &{{$tableNameSingular}}{}
|
{{$varNameSingular}} := &{{$tableNameSingular}}{}
|
||||||
if err = randomize.Struct(seed, {{$varNameSingular}}, {{$varNameSingular}}DBTypes, true); err != nil {
|
if err = randomize.Struct(seed, {{$varNameSingular}}, {{$varNameSingular}}DBTypes, true, {{$varNameSingular}}ColumnsWithDefault...); err != nil {
|
||||||
t.Errorf("Unable to randomize {{$tableNameSingular}} struct: %s", err)
|
t.Errorf("Unable to randomize {{$tableNameSingular}} struct: %s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -68,7 +68,7 @@ func test{{$tableNamePlural}}SliceDeleteAll(t *testing.T) {
|
||||||
seed := randomize.NewSeed()
|
seed := randomize.NewSeed()
|
||||||
var err error
|
var err error
|
||||||
{{$varNameSingular}} := &{{$tableNameSingular}}{}
|
{{$varNameSingular}} := &{{$tableNameSingular}}{}
|
||||||
if err = randomize.Struct(seed, {{$varNameSingular}}, {{$varNameSingular}}DBTypes, true); err != nil {
|
if err = randomize.Struct(seed, {{$varNameSingular}}, {{$varNameSingular}}DBTypes, true, {{$varNameSingular}}ColumnsWithDefault...); err != nil {
|
||||||
t.Errorf("Unable to randomize {{$tableNameSingular}} struct: %s", err)
|
t.Errorf("Unable to randomize {{$tableNameSingular}} struct: %s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -41,7 +41,7 @@ func test{{$tableNamePlural}}InsertWhitelist(t *testing.T) {
|
||||||
|
|
||||||
tx := MustTx(boil.Begin())
|
tx := MustTx(boil.Begin())
|
||||||
defer tx.Rollback()
|
defer tx.Rollback()
|
||||||
if err = {{$varNameSingular}}.Insert(tx, {{$varNameSingular}}Columns...); err != nil {
|
if err = {{$varNameSingular}}.Insert(tx, {{$varNameSingular}}ColumnsWithoutDefault...); err != nil {
|
||||||
t.Error(err)
|
t.Error(err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
templates_test/main_test/mssql_main.tpl (new file, +131 lines)
@@ -0,0 +1,131 @@
|
||||||
|
type mssqlTester struct {
|
||||||
|
dbConn *sql.DB
|
||||||
|
dbName string
|
||||||
|
host string
|
||||||
|
user string
|
||||||
|
pass string
|
||||||
|
sslmode string
|
||||||
|
port int
|
||||||
|
testDBName string
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
dbMain = &mssqlTester{}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *mssqlTester) setup() error {
|
||||||
|
var err error
|
||||||
|
m.dbName = viper.GetString("mssql.dbname")
|
||||||
|
m.host = viper.GetString("mssql.host")
|
||||||
|
m.user = viper.GetString("mssql.user")
|
||||||
|
m.pass = viper.GetString("mssql.pass")
|
||||||
|
m.port = viper.GetInt("mssql.port")
|
||||||
|
m.sslmode = viper.GetString("mssql.sslmode")
|
||||||
|
// Create a randomized db name.
|
||||||
|
m.testDBName = randomize.StableDBName(m.dbName)
|
||||||
|
|
||||||
|
if err = m.dropTestDB(); err != nil {
|
||||||
|
return errors.Err(err)
|
||||||
|
}
|
||||||
|
if err = m.createTestDB(); err != nil {
|
||||||
|
return errors.Err(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
createCmd := exec.Command("sqlcmd", "-S", m.host, "-U", m.user, "-P", m.pass, "-d", m.testDBName)
|
||||||
|
|
||||||
|
f, err := os.Open("tables_schema.sql")
|
||||||
|
if err != nil {
|
||||||
|
return errors.Prefix("failed to open tables_schema.sql file", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
defer f.Close()
|
||||||
|
|
||||||
|
createCmd.Stdin = newFKeyDestroyer(rgxMSSQLkey, f)
|
||||||
|
|
||||||
|
if err = createCmd.Start(); err != nil {
|
||||||
|
return errors.Prefix("failed to start sqlcmd command", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err = createCmd.Wait(); err != nil {
|
||||||
|
fmt.Println(err)
|
||||||
|
return errors.Prefix("failed to wait for sqlcmd command", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *mssqlTester) sslMode(mode string) string {
|
||||||
|
switch mode {
|
||||||
|
case "true":
|
||||||
|
return "true"
|
||||||
|
case "false":
|
||||||
|
return "false"
|
||||||
|
default:
|
||||||
|
return "disable"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *mssqlTester) createTestDB() error {
|
||||||
|
sql := fmt.Sprintf(`
|
||||||
|
CREATE DATABASE %s;
|
||||||
|
GO
|
||||||
|
ALTER DATABASE %[1]s
|
||||||
|
SET READ_COMMITTED_SNAPSHOT ON;
|
||||||
|
GO`, m.testDBName)
|
||||||
|
return m.runCmd(sql, "sqlcmd", "-S", m.host, "-U", m.user, "-P", m.pass)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *mssqlTester) dropTestDB() error {
|
||||||
|
// Since MS SQL 2016 it can be done with
|
||||||
|
// DROP DATABASE [ IF EXISTS ] { database_name | database_snapshot_name } [ ,...n ] [;]
|
||||||
|
sql := fmt.Sprintf(`
|
||||||
|
IF EXISTS(SELECT name FROM sys.databases
|
||||||
|
WHERE name = '%s')
|
||||||
|
DROP DATABASE %s
|
||||||
|
GO`, m.testDBName, m.testDBName)
|
||||||
|
return m.runCmd(sql, "sqlcmd", "-S", m.host, "-U", m.user, "-P", m.pass)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *mssqlTester) teardown() error {
|
||||||
|
if m.dbConn != nil {
|
||||||
|
m.dbConn.Close()
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := m.dropTestDB(); err != nil {
|
||||||
|
return errors.Err(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *mssqlTester) runCmd(stdin, command string, args ...string) error {
|
||||||
|
cmd := exec.Command(command, args...)
|
||||||
|
cmd.Stdin = strings.NewReader(stdin)
|
||||||
|
|
||||||
|
stdout := &bytes.Buffer{}
|
||||||
|
stderr := &bytes.Buffer{}
|
||||||
|
cmd.Stdout = stdout
|
||||||
|
cmd.Stderr = stderr
|
||||||
|
if err := cmd.Run(); err != nil {
|
||||||
|
fmt.Println("failed running:", command, args)
|
||||||
|
fmt.Println(stdout.String())
|
||||||
|
fmt.Println(stderr.String())
|
||||||
|
return errors.Err(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *mssqlTester) conn() (*sql.DB, error) {
|
||||||
|
if m.dbConn != nil {
|
||||||
|
return m.dbConn, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var err error
|
||||||
|
m.dbConn, err = sql.Open("mssql", drivers.MSSQLBuildQueryString(m.user, m.pass, m.testDBName, m.host, m.port, m.sslmode))
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return m.dbConn, nil
|
||||||
|
}
|
|
@ -30,14 +30,14 @@ func (m *mysqlTester) setup() error {
|
||||||
m.testDBName = randomize.StableDBName(m.dbName)
|
m.testDBName = randomize.StableDBName(m.dbName)
|
||||||
|
|
||||||
if err = m.makeOptionFile(); err != nil {
|
if err = m.makeOptionFile(); err != nil {
|
||||||
return errors.Wrap(err, "couldn't make option file")
|
return errors.Prefix("couldn't make option file", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if err = m.dropTestDB(); err != nil {
|
if err = m.dropTestDB(); err != nil {
|
||||||
return err
|
return errors.Err(err)
|
||||||
}
|
}
|
||||||
if err = m.createTestDB(); err != nil {
|
if err = m.createTestDB(); err != nil {
|
||||||
return err
|
return errors.Err(err)
|
||||||
}
|
}
|
||||||
|
|
||||||
dumpCmd := exec.Command("mysqldump", m.defaultsFile(), "--no-data", m.dbName)
|
dumpCmd := exec.Command("mysqldump", m.defaultsFile(), "--no-data", m.dbName)
|
||||||
|
@ -48,22 +48,22 @@ func (m *mysqlTester) setup() error {
|
||||||
createCmd.Stdin = newFKeyDestroyer(rgxMySQLkey, r)
|
createCmd.Stdin = newFKeyDestroyer(rgxMySQLkey, r)
|
||||||
|
|
||||||
if err = dumpCmd.Start(); err != nil {
|
if err = dumpCmd.Start(); err != nil {
|
||||||
return errors.Wrap(err, "failed to start mysqldump command")
|
return errors.Prefix("failed to start mysqldump command", err)
|
||||||
}
|
}
|
||||||
if err = createCmd.Start(); err != nil {
|
if err = createCmd.Start(); err != nil {
|
||||||
return errors.Wrap(err, "failed to start mysql command")
|
return errors.Prefix("failed to start mysql command", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if err = dumpCmd.Wait(); err != nil {
|
if err = dumpCmd.Wait(); err != nil {
|
||||||
fmt.Println(err)
|
fmt.Println(err)
|
||||||
return errors.Wrap(err, "failed to wait for mysqldump command")
|
return errors.Prefix("failed to wait for mysqldump command", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
w.Close() // After dumpCmd is done, close the write end of the pipe
|
w.Close() // After dumpCmd is done, close the write end of the pipe
|
||||||
|
|
||||||
if err = createCmd.Wait(); err != nil {
|
if err = createCmd.Wait(); err != nil {
|
||||||
fmt.Println(err)
|
fmt.Println(err)
|
||||||
return errors.Wrap(err, "failed to wait for mysql command")
|
return errors.Prefix("failed to wait for mysql command", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
|
@ -87,7 +87,15 @@ func (m *mysqlTester) defaultsFile() string {
|
||||||
func (m *mysqlTester) makeOptionFile() error {
|
func (m *mysqlTester) makeOptionFile() error {
|
||||||
tmp, err := ioutil.TempFile("", "optionfile")
|
tmp, err := ioutil.TempFile("", "optionfile")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return errors.Wrap(err, "failed to create option file")
|
return errors.Prefix("failed to create option file", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
isTCP := false
|
||||||
|
_, err = os.Stat(m.host)
|
||||||
|
if os.IsNotExist(err) {
|
||||||
|
isTCP = true
|
||||||
|
} else if err != nil {
|
||||||
|
return errors.Prefix("could not stat m.host", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
fmt.Fprintln(tmp, "[client]")
|
fmt.Fprintln(tmp, "[client]")
|
||||||
|
@ -96,6 +104,9 @@ func (m *mysqlTester) makeOptionFile() error {
|
||||||
fmt.Fprintf(tmp, "user=%s\n", m.user)
|
fmt.Fprintf(tmp, "user=%s\n", m.user)
|
||||||
fmt.Fprintf(tmp, "password=%s\n", m.pass)
|
fmt.Fprintf(tmp, "password=%s\n", m.pass)
|
||||||
fmt.Fprintf(tmp, "ssl-mode=%s\n", m.sslMode(m.sslmode))
|
fmt.Fprintf(tmp, "ssl-mode=%s\n", m.sslMode(m.sslmode))
|
||||||
|
if isTCP {
|
||||||
|
fmt.Fprintln(tmp, "protocol=tcp")
|
||||||
|
}
|
||||||
|
|
||||||
fmt.Fprintln(tmp, "[mysqldump]")
|
fmt.Fprintln(tmp, "[mysqldump]")
|
||||||
fmt.Fprintf(tmp, "host=%s\n", m.host)
|
fmt.Fprintf(tmp, "host=%s\n", m.host)
|
||||||
|
@ -103,6 +114,9 @@ func (m *mysqlTester) makeOptionFile() error {
|
||||||
fmt.Fprintf(tmp, "user=%s\n", m.user)
|
fmt.Fprintf(tmp, "user=%s\n", m.user)
|
||||||
fmt.Fprintf(tmp, "password=%s\n", m.pass)
|
fmt.Fprintf(tmp, "password=%s\n", m.pass)
|
||||||
fmt.Fprintf(tmp, "ssl-mode=%s\n", m.sslMode(m.sslmode))
|
fmt.Fprintf(tmp, "ssl-mode=%s\n", m.sslMode(m.sslmode))
|
||||||
|
if isTCP {
|
||||||
|
fmt.Fprintln(tmp, "protocol=tcp")
|
||||||
|
}
|
||||||
|
|
||||||
m.optionFile = tmp.Name()
|
m.optionFile = tmp.Name()
|
||||||
|
|
||||||
|
@ -125,7 +139,7 @@ func (m *mysqlTester) teardown() error {
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := m.dropTestDB(); err != nil {
|
if err := m.dropTestDB(); err != nil {
|
||||||
return err
|
return errors.Err(err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return os.Remove(m.optionFile)
|
return os.Remove(m.optionFile)
|
||||||
|
@ -145,7 +159,7 @@ func (m *mysqlTester) runCmd(stdin, command string, args ...string) error {
|
||||||
fmt.Println("failed running:", command, args)
|
fmt.Println("failed running:", command, args)
|
||||||
fmt.Println(stdout.String())
|
fmt.Println(stdout.String())
|
||||||
fmt.Println(stderr.String())
|
fmt.Println(stderr.String())
|
||||||
return err
|
return errors.Err(err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
|
|
|
@ -33,14 +33,14 @@ func (p *pgTester) setup() error {
|
||||||
p.testDBName = randomize.StableDBName(p.dbName)
|
p.testDBName = randomize.StableDBName(p.dbName)
|
||||||
|
|
||||||
if err = p.makePGPassFile(); err != nil {
|
if err = p.makePGPassFile(); err != nil {
|
||||||
return err
|
return errors.Err(err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if err = p.dropTestDB(); err != nil {
|
if err = p.dropTestDB(); err != nil {
|
||||||
return err
|
return errors.Err(err)
|
||||||
}
|
}
|
||||||
if err = p.createTestDB(); err != nil {
|
if err = p.createTestDB(); err != nil {
|
||||||
return err
|
return errors.Err(err)
|
||||||
}
|
}
|
||||||
|
|
||||||
dumpCmd := exec.Command("pg_dump", "--schema-only", p.dbName)
|
dumpCmd := exec.Command("pg_dump", "--schema-only", p.dbName)
|
||||||
|
@ -53,22 +53,22 @@ func (p *pgTester) setup() error {
|
||||||
createCmd.Stdin = newFKeyDestroyer(rgxPGFkey, r)
|
createCmd.Stdin = newFKeyDestroyer(rgxPGFkey, r)
|
||||||
|
|
||||||
if err = dumpCmd.Start(); err != nil {
|
if err = dumpCmd.Start(); err != nil {
|
||||||
return errors.Wrap(err, "failed to start pg_dump command")
|
return errors.Prefix("failed to start pg_dump command", err)
|
||||||
}
|
}
|
||||||
if err = createCmd.Start(); err != nil {
|
if err = createCmd.Start(); err != nil {
|
||||||
return errors.Wrap(err, "failed to start psql command")
|
return errors.Prefix("failed to start psql command", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if err = dumpCmd.Wait(); err != nil {
|
if err = dumpCmd.Wait(); err != nil {
|
||||||
fmt.Println(err)
|
fmt.Println(err)
|
||||||
return errors.Wrap(err, "failed to wait for pg_dump command")
|
return errors.Prefix("failed to wait for pg_dump command", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
w.Close() // After dumpCmd is done, close the write end of the pipe
|
w.Close() // After dumpCmd is done, close the write end of the pipe
|
||||||
|
|
||||||
if err = createCmd.Wait(); err != nil {
|
if err = createCmd.Wait(); err != nil {
|
||||||
fmt.Println(err)
|
fmt.Println(err)
|
||||||
return errors.Wrap(err, "failed to wait for psql command")
|
return errors.Prefix("failed to wait for psql command", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
|
@@ -90,7 +90,7 @@ func (p *pgTester) runCmd(stdin, command string, args ...string) error {
 fmt.Println("failed running:", command, args)
 fmt.Println(stdout.String())
 fmt.Println(stderr.String())
-return err
+return errors.Err(err)
 }

 return nil
@@ -108,7 +108,7 @@ func (p *pgTester) pgEnv() []string {
 func (p *pgTester) makePGPassFile() error {
 tmp, err := ioutil.TempFile("", "pgpass")
 if err != nil {
-return errors.Wrap(err, "failed to create option file")
+return errors.Prefix("failed to create option file", err)
 }

 fmt.Fprintf(tmp, "%s:%d:postgres:%s", p.host, p.port, p.user)
@@ -145,12 +145,12 @@ func (p *pgTester) dropTestDB() error {
 func (p *pgTester) teardown() error {
 var err error
 if err = p.dbConn.Close(); err != nil {
-return err
+return errors.Err(err)
 }
 p.dbConn = nil

 if err = p.dropTestDB(); err != nil {
-return err
+return errors.Err(err)
 }

 return os.Remove(p.pgPassFile)
@@ -50,7 +50,7 @@ func test{{$txt.LocalTable.NameGo}}OneToOne{{$txt.ForeignTable.NameGo}}Using{{$t
 }

 slice := {{$txt.LocalTable.NameGo}}Slice{&local}
-if err = local.L.Load{{$txt.Function.Name}}(tx, false, &slice); err != nil {
+if err = local.L.Load{{$txt.Function.Name}}(tx, false, (*[]*{{$txt.LocalTable.NameGo}})(&slice)); err != nil {
 t.Fatal(err)
 }
 if local.R.{{$txt.Function.Name}} == nil {
@@ -28,7 +28,7 @@ func test{{$txt.LocalTable.NameGo}}ToMany{{$txt.Function.Name}}(t *testing.T) {
 {{if .Nullable -}}
 a.{{.Column | titleCase}}.Valid = true
 {{- end}}
-{{- if .ForeignColumnNullable -}}
+{{- if .ForeignColumnNullable}}
 b.{{.ForeignColumn | titleCase}}.Valid = true
 c.{{.ForeignColumn | titleCase}}.Valid = true
 {{- end}}

@@ -87,7 +87,7 @@ func test{{$txt.LocalTable.NameGo}}ToMany{{$txt.Function.Name}}(t *testing.T) {
 }

 slice := {{$txt.LocalTable.NameGo}}Slice{&a}
-if err = a.L.Load{{$txt.Function.Name}}(tx, false, &slice); err != nil {
+if err = a.L.Load{{$txt.Function.Name}}(tx, false, (*[]*{{$txt.LocalTable.NameGo}})(&slice)); err != nil {
 t.Fatal(err)
 }
 if got := len(a.R.{{$txt.Function.Name}}); got != 2 {
@@ -159,12 +159,16 @@ func test{{$txt.LocalTable.NameGo}}ToManySetOp{{$txt.Function.Name}}(t *testing.

 {{- if .ToJoinTable}}

-if len(b.R.{{$txt.Function.ForeignName}}) != 0 {
-t.Error("relationship was not removed properly from the slice")
-}
-if len(c.R.{{$txt.Function.ForeignName}}) != 0 {
-t.Error("relationship was not removed properly from the slice")
-}
+// The following checks cannot be implemented since we have no handle
+// to these when we call Set(). Leaving them here as wishful thinking
+// and to let people know there's dragons.
+//
+// if len(b.R.{{$txt.Function.ForeignName}}) != 0 {
+// t.Error("relationship was not removed properly from the slice")
+// }
+// if len(c.R.{{$txt.Function.ForeignName}}) != 0 {
+// t.Error("relationship was not removed properly from the slice")
+// }
 if d.R.{{$txt.Function.ForeignName}}[0] != &a {
 t.Error("relationship was not added properly to the slice")
 }
@@ -13,10 +13,10 @@ func test{{$txt.LocalTable.NameGo}}ToOne{{$txt.ForeignTable.NameGo}}Using{{$txt.
 var foreign {{$txt.ForeignTable.NameGo}}

 seed := randomize.NewSeed()
-if err := randomize.Struct(seed, &local, {{$varNameSingular}}DBTypes, true, {{$varNameSingular}}ColumnsWithDefault...); err != nil {
+if err := randomize.Struct(seed, &local, {{$varNameSingular}}DBTypes, {{if .Nullable}}true{{else}}false{{end}}, {{$varNameSingular}}ColumnsWithDefault...); err != nil {
 t.Errorf("Unable to randomize {{$txt.LocalTable.NameGo}} struct: %s", err)
 }
-if err := randomize.Struct(seed, &foreign, {{$foreignVarNameSingular}}DBTypes, true, {{$foreignVarNameSingular}}ColumnsWithDefault...); err != nil {
+if err := randomize.Struct(seed, &foreign, {{$foreignVarNameSingular}}DBTypes, {{if .ForeignColumnNullable}}true{{else}}false{{end}}, {{$foreignVarNameSingular}}ColumnsWithDefault...); err != nil {
 t.Errorf("Unable to randomize {{$txt.ForeignTable.NameGo}} struct: %s", err)
 }

@@ -50,7 +50,7 @@ func test{{$txt.LocalTable.NameGo}}ToOne{{$txt.ForeignTable.NameGo}}Using{{$txt.
 }

 slice := {{$txt.LocalTable.NameGo}}Slice{&local}
-if err = local.L.Load{{$txt.Function.Name}}(tx, false, &slice); err != nil {
+if err = local.L.Load{{$txt.Function.Name}}(tx, false, (*[]*{{$txt.LocalTable.NameGo}})(&slice)); err != nil {
 t.Fatal(err)
 }
 if local.R.{{$txt.Function.Name}} == nil {
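
Note (not part of the diff): a minimal, hypothetical Go sketch of the pointer conversion the templates above now emit, (*[]*{{$txt.LocalTable.NameGo}})(&slice) instead of &slice. The Pilot/PilotSlice names below are illustrative stand-ins for a generated model and its named slice type.

package main

import "fmt"

type Pilot struct{ ID int }

// PilotSlice mirrors the named slice types the generator emits per table.
type PilotSlice []*Pilot

// load stands in for a generated Load* relationship helper that appends
// through a *[]*Pilot.
func load(dest *[]*Pilot) {
	*dest = append(*dest, &Pilot{ID: 2})
}

func main() {
	slice := PilotSlice{&Pilot{ID: 1}}
	// *PilotSlice is a distinct type from *[]*Pilot; the conversion is legal
	// because PilotSlice's underlying type is []*Pilot.
	load((*[]*Pilot)(&slice))
	fmt.Println(len(slice)) // 2
}
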
@@ -104,6 +104,12 @@ func setConfigDefaults() {
 if viper.GetInt("mysql.port") == 0 {
 viper.Set("mysql.port", 3306)
 }
+if viper.GetString("mssql.sslmode") == "" {
+viper.Set("mssql.sslmode", "true")
+}
+if viper.GetInt("mssql.port") == 0 {
+viper.Set("mssql.port", 1433)
+}
 }

 func validateConfig(driverName string) error {

@@ -127,5 +133,15 @@ func validateConfig(driverName string) error {
 ).Check()
 }

-return errors.New("not a valid driver name")
+if driverName == "mssql" {
+return vala.BeginValidation().Validate(
+vala.StringNotEmpty(viper.GetString("mssql.user"), "mssql.user"),
+vala.StringNotEmpty(viper.GetString("mssql.host"), "mssql.host"),
+vala.Not(vala.Equals(viper.GetInt("mssql.port"), 0, "mssql.port")),
+vala.StringNotEmpty(viper.GetString("mssql.dbname"), "mssql.dbname"),
+vala.StringNotEmpty(viper.GetString("mssql.sslmode"), "mssql.sslmode"),
+).Check()
+}
+
+return errors.Err("not a valid driver name")
 }
@@ -9,6 +9,7 @@ func MustTx(transactor boil.Transactor, err error) boil.Transactor {

 var rgxPGFkey = regexp.MustCompile(`(?m)^ALTER TABLE ONLY .*\n\s+ADD CONSTRAINT .*? FOREIGN KEY .*?;\n`)
 var rgxMySQLkey = regexp.MustCompile(`(?m)((,\n)?\s+CONSTRAINT.*?FOREIGN KEY.*?\n)+`)
+var rgxMSSQLkey = regexp.MustCompile(`(?m)^ALTER TABLE .*ADD\s+CONSTRAINT .* FOREIGN KEY.*?.*\n?REFERENCES.*`)

 func newFKeyDestroyer(regex *regexp.Regexp, reader io.Reader) io.Reader {
 return &fKeyDestroyer{
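
Note (not part of the diff): a small, self-contained check of what the new rgxMSSQLkey pattern matches, fed with a constraint copied from the MSSQL test schema added later in this compare. It is only a rough approximation of the effect of newFKeyDestroyer: the FOREIGN KEY statement is stripped so the dumped tables can be recreated in any order.

package main

import (
	"fmt"
	"regexp"
)

var rgxMSSQLkey = regexp.MustCompile(`(?m)^ALTER TABLE .*ADD\s+CONSTRAINT .* FOREIGN KEY.*?.*\n?REFERENCES.*`)

func main() {
	schema := "CREATE TABLE cats (id int);\n" +
		"ALTER TABLE cats ADD CONSTRAINT cats_owner_id_fkey FOREIGN KEY (owner_id) REFERENCES owner(id);\n"
	// The ALTER TABLE ... FOREIGN KEY line is replaced with an empty string,
	// leaving only the CREATE TABLE statement (plus a stray blank line).
	fmt.Print(rgxMSSQLkey.ReplaceAllString(schema, ""))
}
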
@@ -251,7 +251,7 @@ func TestToManySet(t *testing.T) {
 {{- if $table.IsJoinTable -}}
 {{- else -}}
 {{- range $table.ToManyRelationships -}}
-{{- if not .ForeignColumnNullable -}}
+{{- if not (or .ForeignColumnNullable .ToJoinTable)}}
 {{- else -}}
 {{- $txt := txtsFromToMany $dot.Tables $table . -}}
 t.Run("{{$txt.LocalTable.NameGo}}To{{$txt.Function.Name}}", test{{$txt.LocalTable.NameGo}}ToManySetOp{{$txt.Function.Name}})

@@ -268,7 +268,7 @@ func TestToManyRemove(t *testing.T) {
 {{- if $table.IsJoinTable -}}
 {{- else -}}
 {{- range $table.ToManyRelationships -}}
-{{- if not .ForeignColumnNullable -}}
+{{- if not (or .ForeignColumnNullable .ToJoinTable)}}
 {{- else -}}
 {{- $txt := txtsFromToMany $dot.Tables $table . -}}
 t.Run("{{$txt.LocalTable.NameGo}}To{{$txt.Function.Name}}", test{{$txt.LocalTable.NameGo}}ToManyRemoveOp{{$txt.Function.Name}})
@@ -12,7 +12,7 @@ func test{{$tableNamePlural}}Update(t *testing.T) {
 seed := randomize.NewSeed()
 var err error
 {{$varNameSingular}} := &{{$tableNameSingular}}{}
-if err = randomize.Struct(seed, {{$varNameSingular}}, {{$varNameSingular}}DBTypes, true); err != nil {
+if err = randomize.Struct(seed, {{$varNameSingular}}, {{$varNameSingular}}DBTypes, true, {{$varNameSingular}}ColumnsWithDefault...); err != nil {
 t.Errorf("Unable to randomize {{$tableNameSingular}} struct: %s", err)
 }

@@ -50,7 +50,7 @@ func test{{$tableNamePlural}}SliceUpdateAll(t *testing.T) {
 seed := randomize.NewSeed()
 var err error
 {{$varNameSingular}} := &{{$tableNameSingular}}{}
-if err = randomize.Struct(seed, {{$varNameSingular}}, {{$varNameSingular}}DBTypes, true); err != nil {
+if err = randomize.Struct(seed, {{$varNameSingular}}, {{$varNameSingular}}DBTypes, true, {{$varNameSingular}}ColumnsWithDefault...); err != nil {
 t.Errorf("Unable to randomize {{$tableNameSingular}} struct: %s", err)
 }

@@ -82,6 +82,12 @@ func test{{$tableNamePlural}}SliceUpdateAll(t *testing.T) {
 {{$varNameSingular}}Columns,
 {{$varNameSingular}}PrimaryKeyColumns,
 )
+{{- if eq .DriverName "mssql"}}
+fields = strmangle.SetComplement(
+fields,
+{{$varNameSingular}}ColumnsWithAuto,
+)
+{{- end}}
 }

 value := reflect.Indirect(reflect.ValueOf({{$varNameSingular}}))
testdata/Dockerfile (vendored, new file, 35 lines)

@@ -0,0 +1,35 @@
# This Dockerfile builds the image used for CI/testing.
FROM ubuntu:16.04

ENV PATH /usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/go/bin:/opt/mssql-tools/bin
ENV GODIST go1.8.linux-amd64.tar.gz

# Set up locales for sqlcmd (otherwise it breaks)
RUN locale-gen en_US.UTF-8 \
&& echo "LC_ALL=en_US.UTF-8" >> /etc/default/locale \
&& echo "LANG=en_US.UTF-8" >> /etc/default/locale

# Install bootstrap-y tools
RUN apt-get update \
&& apt-get install -y apt-transport-https software-properties-common python3-software-properties \
&& apt-add-repository ppa:git-core/ppa \
&& apt-get update \
&& apt-get install -y curl git

# Install database clients
# MySQL 8.0 is still in development, so we're using 5.7 which is already
# available in Ubuntu 16.04
RUN curl https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - \
&& echo 'deb http://apt.postgresql.org/pub/repos/apt/ xenial-pgdg main' > /etc/apt/sources.list.d/psql.list \
&& curl https://packages.microsoft.com/keys/microsoft.asc | apt-key add - \
&& curl https://packages.microsoft.com/config/ubuntu/16.04/prod.list > /etc/apt/sources.list.d/msprod.list \
&& apt-get update \
&& env ACCEPT_EULA=Y apt-get install -y git postgresql-client-9.6 mysql-client-5.7 mssql-tools unixodbc-dev

# Install Go
RUN curl -o $GODIST https://storage.googleapis.com/golang/$GODIST \
&& rm -rf /usr/local/go \
&& tar -C /usr/local -xzf $GODIST

RUN go get -u -v github.com/jstemmer/go-junit-report \
&& mv /root/go/bin/go-junit-report /usr/bin/go-junit-report
testdata/mssql_test_schema.sql (vendored, new file, 439 lines)

@@ -0,0 +1,439 @@
CREATE TABLE magic
(
id int NOT NULL IDENTITY (1,1) PRIMARY KEY,
id_two int NOT NULL,
id_three int,
bit_zero bit,
bit_one bit NULL,
bit_two bit NOT NULL,
bit_three bit NULL DEFAULT 0,
bit_four bit NULL DEFAULT 1,
bit_five bit NOT NULL DEFAULT 0,
bit_six bit NOT NULL DEFAULT 1,
string_zero VARCHAR(1),
string_one VARCHAR(1) NULL,
string_two VARCHAR(1) NOT NULL,
string_three VARCHAR(1) NULL DEFAULT 'a',
string_four VARCHAR(1) NOT NULL DEFAULT 'b',
string_five VARCHAR(1000),
string_six VARCHAR(1000) NULL,
string_seven VARCHAR(1000) NOT NULL,
string_eight VARCHAR(1000) NULL DEFAULT 'abcdefgh',
string_nine VARCHAR(1000) NOT NULL DEFAULT 'abcdefgh',
string_ten VARCHAR(1000) NULL DEFAULT '',
string_eleven VARCHAR(1000) NOT NULL DEFAULT '',
big_int_zero bigint,
big_int_one bigint NULL,
big_int_two bigint NOT NULL,
big_int_three bigint NULL DEFAULT 111111,
big_int_four bigint NOT NULL DEFAULT 222222,
big_int_five bigint NULL DEFAULT 0,
big_int_six bigint NOT NULL DEFAULT 0,
int_zero int,
int_one int NULL,
int_two int NOT NULL,
int_three int NULL DEFAULT 333333,
int_four int NOT NULL DEFAULT 444444,
int_five int NULL DEFAULT 0,
int_six int NOT NULL DEFAULT 0,
float_zero float,
float_one float,
float_two float(24),
float_three float(24),
float_four float(24) NULL,
float_five float(24) NOT NULL,
float_six float(24) NULL DEFAULT 1.1,
float_seven float(24) NOT NULL DEFAULT 1.1,
float_eight float(24) NULL DEFAULT 0.0,
float_nine float(24) NULL DEFAULT 0.0,
bytea_zero binary NOT NULL,
bytea_one binary NOT NULL,
bytea_two binary NOT NULL,
bytea_three binary NOT NULL DEFAULT CONVERT(VARBINARY(MAX),'a'),
bytea_four binary NOT NULL DEFAULT CONVERT(VARBINARY(MAX),'b'),
bytea_five binary(100) NOT NULL DEFAULT CONVERT(VARBINARY(MAX),'abcdefghabcdefghabcdefgh'),
bytea_six binary(100) NOT NULL DEFAULT CONVERT(VARBINARY(MAX),'hgfedcbahgfedcbahgfedcba'),
bytea_seven binary NOT NULL DEFAULT CONVERT(VARBINARY(MAX),''),
bytea_eight binary NOT NULL DEFAULT CONVERT(VARBINARY(MAX),''),
time_zero timestamp NOT NULL,
time_one date,
time_eleven date NULL,
time_twelve date NOT NULL,
time_fifteen date NULL DEFAULT '19990108',
time_sixteen date NOT NULL DEFAULT '1999-01-08'
);
GO

CREATE TABLE magicest
(
id int NOT NULL IDENTITY (1,1) PRIMARY KEY,
kk float NULL,
ll float NOT NULL,
mm tinyint NULL,
nn tinyint NOT NULL,
oo bit NULL,
pp bit NOT NULL,
qq smallint NULL,
rr smallint NOT NULL,
ss int NULL,
tt int NOT NULL,
uu bigint NULL,
vv bigint NOT NULL,
ww float NULL,
xx float NOT NULL,
yy float NULL,
zz float NOT NULL,
aaa double precision NULL,
bbb double precision NOT NULL,
ccc real NULL,
ddd real NOT NULL,
ggg date NULL,
hhh date NOT NULL,
iii datetime NULL,
jjj datetime NOT NULL,
kkk timestamp NOT NULL,
mmm binary NOT NULL,
nnn binary NOT NULL,
ooo varbinary(100) NOT NULL,
ppp varbinary(100) NOT NULL,
qqq varbinary NOT NULL,
rrr varbinary NOT NULL,
www varbinary(max) NOT NULL,
xxx varbinary(max) NOT NULL,
yyy varchar(100) NULL,
zzz varchar(100) NOT NULL,
aaaa char NULL,
bbbb char NOT NULL,
cccc VARCHAR(MAX) NULL,
dddd VARCHAR(MAX) NOT NULL,
eeee tinyint NULL,
ffff tinyint NOT NULL
);
GO

create table owner
(
id int NOT NULL IDENTITY (1,1) PRIMARY KEY,
name varchar(255) not null
);
GO

create table cats
(
id int NOT NULL IDENTITY (1,1) PRIMARY KEY,
name varchar(255) not null,
owner_id int
);
GO

ALTER TABLE cats ADD CONSTRAINT cats_owner_id_fkey FOREIGN KEY (owner_id) REFERENCES owner(id);
GO

create table toys
(
id int NOT NULL IDENTITY (1,1) PRIMARY KEY,
name varchar(255) not null
);
GO

create table cat_toys
(
cat_id int not null references cats (id),
toy_id int not null references toys (id),
primary key (cat_id, toy_id)
);
GO

create table dog_toys
(
dog_id int not null,
toy_id int not null,
primary key (dog_id, toy_id)
);
GO

create table dragon_toys
(
dragon_id varchar(100),
toy_id varchar(100),
primary key (dragon_id, toy_id)
);
GO

create table spider_toys
(
spider_id varchar(100) primary key,
name varchar(100)
);
GO

create table pals
(
pal varchar(100) primary key,
name varchar(100)
);
GO

create table friend
(
friend varchar(100) primary key,
name varchar(100)
);
GO

create table bro
(
bros varchar(100) primary key,
name varchar(100)
);
GO

create table enemies
(
enemies varchar(100) primary key,
name varchar(100)
);
GO

create table chocolate
(
dog varchar(100) primary key
);
GO

create table waffles
(
cat varchar(100) primary key
);
GO

create table tigers
(
id binary primary key,
name binary NOT NULL
);
GO

create table elephants
(
id binary primary key,
name binary not null,
tiger_id binary NOT NULL unique
);
GO

ALTER TABLE elephants ADD CONSTRAINT elephants_tiger_id_fkey FOREIGN KEY (tiger_id) REFERENCES tigers(id);
GO

create table wolves
(
id binary primary key,
name binary not null,
tiger_id binary not null unique
);
GO

ALTER TABLE wolves ADD CONSTRAINT wolves_tiger_id_fkey FOREIGN KEY (tiger_id) REFERENCES tigers(id);
GO

create table ants
(
id binary primary key,
name binary not null,
tiger_id binary not null
);
GO

ALTER TABLE ants ADD CONSTRAINT ants_tiger_id_fkey FOREIGN KEY (tiger_id) REFERENCES tigers(id);
GO

create table worms
(
id binary primary key,
name binary not null,
tiger_id binary NOT NULL
);
GO

ALTER TABLE worms ADD CONSTRAINT worms_tiger_id_fkey FOREIGN KEY (tiger_id) REFERENCES tigers(id);
GO

create table byte_pilots
(
id binary primary key not null,
name varchar(255)
);
GO

create table byte_airports
(
id binary primary key not null,
name varchar(255)
);
GO

create table byte_languages
(
id binary primary key not null,
name varchar(255)
);
GO

create table byte_jets
(
id binary primary key not null,
name varchar(255),
byte_pilot_id binary unique NOT NULL,
byte_airport_id binary NOT NULL
);
GO

ALTER TABLE byte_jets ADD CONSTRAINT byte_jets_byte_pilot_id_fkey FOREIGN KEY (byte_pilot_id) REFERENCES byte_pilots(id);
GO
ALTER TABLE byte_jets ADD CONSTRAINT byte_jets_byte_airport_id_fkey FOREIGN KEY (byte_airport_id) REFERENCES byte_airports(id);
GO

create table byte_pilot_languages
(
byte_pilot_id binary not null,
byte_language_id binary not null
);
GO

ALTER TABLE byte_pilot_languages ADD CONSTRAINT byte_pilot_languages_pkey PRIMARY KEY (byte_pilot_id,byte_language_id);
GO

ALTER TABLE byte_pilot_languages ADD CONSTRAINT byte_pilot_languages_byte_pilot_id_fkey FOREIGN KEY (byte_pilot_id) REFERENCES byte_pilots(id);
GO
ALTER TABLE byte_pilot_languages ADD CONSTRAINT byte_pilot_languages_byte_language_id_fkey FOREIGN KEY (byte_language_id) REFERENCES byte_languages(id);
GO

create table cars
(
id integer not null,
name VARCHAR(MAX),
primary key (id)
);
GO

create table car_cars
(
car_id integer not null,
awesome_car_id integer not null,
relation VARCHAR(MAX) not null,
primary key (car_id, awesome_car_id)
);
GO

ALTER TABLE car_cars ADD CONSTRAINT car_id_fkey FOREIGN KEY (car_id) REFERENCES cars(id);
GO
ALTER TABLE car_cars ADD CONSTRAINT awesome_car_id_fkey FOREIGN KEY (awesome_car_id) REFERENCES cars(id);
GO

create table trucks
(
id integer not null,
parent_id integer,
name VARCHAR(MAX),
primary key (id)
);
GO

ALTER TABLE trucks ADD CONSTRAINT parent_id_fkey FOREIGN KEY (parent_id) REFERENCES trucks(id);
GO

CREATE TABLE race
(
id integer PRIMARY KEY NOT NULL,
race_date datetime,
track VARCHAR(MAX)
);
GO

CREATE TABLE race_results
(
id integer PRIMARY KEY NOT NULL,
race_id integer,
name VARCHAR(MAX)
);
GO

ALTER TABLE race_results ADD CONSTRAINT race_id_fkey FOREIGN KEY (race_id) REFERENCES race(id);
GO

CREATE TABLE race_result_scratchings
(
id integer PRIMARY KEY NOT NULL,
results_id integer NOT NULL,
name VARCHAR(MAX) NOT NULL
);
GO

ALTER TABLE race_result_scratchings ADD CONSTRAINT results_id_fkey FOREIGN KEY (results_id) REFERENCES race_results(id);
GO

CREATE TABLE pilots
(
id integer NOT NULL,
name VARCHAR(MAX) NOT NULL
);
GO

ALTER TABLE pilots ADD CONSTRAINT pilot_pkey PRIMARY KEY (id);
GO

CREATE TABLE jets
(
id integer NOT NULL,
pilot_id integer NOT NULL,
age integer NOT NULL,
name VARCHAR(MAX) NOT NULL,
color VARCHAR(MAX) NOT NULL
);
GO

ALTER TABLE jets ADD CONSTRAINT jet_pkey PRIMARY KEY (id);
GO
ALTER TABLE jets ADD CONSTRAINT pilots_fkey FOREIGN KEY (pilot_id) REFERENCES pilots(id);
GO

CREATE TABLE languages
(
id integer NOT NULL,
language VARCHAR(MAX) NOT NULL
);
GO

ALTER TABLE languages ADD CONSTRAINT language_pkey PRIMARY KEY (id);
GO

-- Join table
CREATE TABLE pilot_languages
(
pilot_id integer NOT NULL,
language_id integer NOT NULL,
uniqueid uniqueidentifier NOT NULL,
);
GO

-- Composite primary key
ALTER TABLE pilot_languages ADD CONSTRAINT pilot_language_pkey PRIMARY KEY (pilot_id, language_id);
GO
ALTER TABLE pilot_languages ADD CONSTRAINT pilot_language_fkey FOREIGN KEY (pilot_id) REFERENCES pilots(id);
GO
ALTER TABLE pilot_languages ADD CONSTRAINT languages_fkey FOREIGN KEY (language_id) REFERENCES languages(id);
GO

CREATE TABLE powers_of_two
(
vid int NOT NULL IDENTITY(1,1),
name varchar(255) NOT NULL DEFAULT '',
machine_name varchar(255) NOT NULL,
description VARCHAR(MAX),
hierarchy tinyint NOT NULL DEFAULT '0',
module varchar(255) NOT NULL DEFAULT '',
weight int NOT NULL DEFAULT '0',
PRIMARY KEY (vid),
CONSTRAINT machine_name UNIQUE(machine_name)
);
GO
testdata/mysql_test_schema.sql (vendored, 86 changed lines)

@@ -135,7 +135,9 @@ CREATE TABLE magicest (
 aaaa char NULL,
 bbbb char NOT NULL,
 cccc text NULL,
-dddd text NOT NULL
+dddd text NOT NULL,
+eeee tinyint(2) NULL,
+ffff tinyint(2) NOT NULL
 );

 create table owner (

@@ -238,40 +240,6 @@ create table worms (
 foreign key (tiger_id) references tigers (id)
 );

-create table pilots (
-id int primary key not null auto_increment,
-name varchar(255)
-);
-
-create table airports (
-id int primary key not null auto_increment,
-name varchar(255)
-);
-
-create table languages (
-id int primary key not null auto_increment,
-name varchar(255)
-);
-
-create table jets (
-id int primary key not null auto_increment,
-name varchar(255),
-pilot_id integer,
-airport_id integer,
-
-foreign key (pilot_id) references pilots (id),
-foreign key (airport_id) references airports (id)
-);
-
-create table pilot_languages (
-pilot_id integer not null,
-language_id integer not null,
-
-primary key (pilot_id, language_id),
-foreign key (pilot_id) references pilots (id),
-foreign key (language_id) references languages (id)
-);
-
 create table byte_pilots (
 id binary primary key not null,
 name varchar(255)

@@ -349,3 +317,51 @@ CREATE TABLE race_result_scratchings (
 foreign key (results_id) references race_results(id)
 );

+CREATE TABLE pilots (
+id integer NOT NULL,
+name text NOT NULL
+);
+
+ALTER TABLE pilots ADD CONSTRAINT pilot_pkey PRIMARY KEY (id);
+
+CREATE TABLE jets (
+id integer NOT NULL,
+pilot_id integer NOT NULL,
+age integer NOT NULL,
+name text NOT NULL,
+color text NOT NULL
+);
+
+ALTER TABLE jets ADD CONSTRAINT jet_pkey PRIMARY KEY (id);
+ALTER TABLE jets ADD CONSTRAINT pilots_fkey FOREIGN KEY (pilot_id) REFERENCES pilots(id);
+
+CREATE TABLE languages (
+id integer NOT NULL,
+language text NOT NULL
+);
+
+ALTER TABLE languages ADD CONSTRAINT language_pkey PRIMARY KEY (id);
+
+-- Join table
+CREATE TABLE pilot_languages (
+pilot_id integer NOT NULL,
+language_id integer NOT NULL
+);
+
+-- Composite primary key
+ALTER TABLE pilot_languages ADD CONSTRAINT pilot_language_pkey PRIMARY KEY (pilot_id, language_id);
+ALTER TABLE pilot_languages ADD CONSTRAINT pilot_language_fkey FOREIGN KEY (pilot_id) REFERENCES pilots(id);
+ALTER TABLE pilot_languages ADD CONSTRAINT languages_fkey FOREIGN KEY (language_id) REFERENCES languages(id);
+
+CREATE TABLE powers_of_two (
+vid int(10) unsigned NOT NULL AUTO_INCREMENT,
+name varchar(255) NOT NULL DEFAULT '',
+machine_name varchar(255) NOT NULL DEFAULT '',
+description longtext,
+hierarchy tinyint(3) unsigned NOT NULL DEFAULT '0',
+module varchar(255) NOT NULL DEFAULT '',
+weight int(11) NOT NULL DEFAULT '0',
+PRIMARY KEY (vid),
+UNIQUE KEY machine_name (machine_name),
+KEY list (weight,name)
+) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8;
testdata/postgres_test_schema.sql (vendored, 76 changed lines)

@@ -246,7 +246,6 @@ create table enemies (
 primary key (enemies)
 );

-
 create table chocolate (
 dog varchar(100) primary key
 );

@@ -307,37 +306,16 @@ create table worms (
 foreign key (tiger_id) references tigers (id)
 );

-create table pilots (
-id serial primary key not null,
-name character varying
+create table addresses (
+id bytea primary key,
+name bytea null
 );

-create table airports (
-id serial primary key not null,
-name character varying
-);
-
-create table languages (
-id serial primary key not null,
-name character varying
-);
-
-create table jets (
-id serial primary key not null,
-name character varying,
-pilot_id integer,
-airport_id integer,
-foreign key (pilot_id) references pilots (id),
-foreign key (airport_id) references airports (id)
-);
-
-create table pilot_languages (
-pilot_id integer not null,
-language_id integer not null,
-
-primary key (pilot_id, language_id),
-foreign key (pilot_id) references pilots (id),
-foreign key (language_id) references languages (id)
+create table houses (
+id bytea primary key,
+name bytea not null,
+address_id bytea not null unique,
+foreign key (address_id) references addresses (id)
 );

 create table byte_pilots (

@@ -416,3 +394,41 @@ CREATE TABLE race_result_scratchings (
 name text NOT NULL,
 foreign key (results_id) references race_results(id)
 );
+
+CREATE TABLE pilots (
+id integer NOT NULL,
+name text NOT NULL
+);
+
+ALTER TABLE pilots ADD CONSTRAINT pilot_pkey PRIMARY KEY (id);
+
+CREATE TABLE jets (
+id integer NOT NULL,
+pilot_id integer NOT NULL,
+age integer NOT NULL,
+name text NOT NULL,
+color text NOT NULL
+);
+
+ALTER TABLE jets ADD CONSTRAINT jet_pkey PRIMARY KEY (id);
+-- The following fkey remains poorly named to avoid regressions related to psql naming
+ALTER TABLE jets ADD CONSTRAINT pilots_fkey FOREIGN KEY (pilot_id) REFERENCES pilots(id);
+
+CREATE TABLE languages (
+id integer NOT NULL,
+language text NOT NULL
+);
+
+ALTER TABLE languages ADD CONSTRAINT language_pkey PRIMARY KEY (id);
+
+-- Join table
+CREATE TABLE pilot_languages (
+pilot_id integer NOT NULL,
+language_id integer NOT NULL
+);
+
+-- Composite primary key
+ALTER TABLE pilot_languages ADD CONSTRAINT pilot_language_pkey PRIMARY KEY (pilot_id, language_id);
+-- The following fkey remains poorly named to avoid regressions related to psql naming
+ALTER TABLE pilot_languages ADD CONSTRAINT pilots_fkey FOREIGN KEY (pilot_id) REFERENCES pilots(id);
+ALTER TABLE pilot_languages ADD CONSTRAINT languages_fkey FOREIGN KEY (language_id) REFERENCES languages(id);