package drivers

import (
	"database/sql"
	"fmt"
	"os"
	"strings"

	"github.com/lbryio/sqlboiler/bdb"
	"github.com/lbryio/sqlboiler/strmangle"

	// Side-effect import sql driver
	_ "github.com/lib/pq"

	"github.com/pkg/errors"
)

// PostgresDriver holds the database connection string and a handle
// to the database connection.
type PostgresDriver struct {
	connStr string
	dbConn  *sql.DB
}

// NewPostgresDriver takes the database connection details as parameters and
// returns a pointer to a PostgresDriver object. Note that it is required to
// call PostgresDriver.Open() and PostgresDriver.Close() to open and close
// the database connection once an object has been obtained.
func NewPostgresDriver(user, pass, dbname, host string, port int, sslmode string) *PostgresDriver {
	driver := PostgresDriver{
		connStr: PostgresBuildQueryString(user, pass, dbname, host, port, sslmode),
	}

	return &driver
}
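
// Example usage (a minimal sketch; the connection values below are
// placeholders, not taken from this package):
//
//	driver := NewPostgresDriver("user", "pass", "mydb", "localhost", 5432, "require")
//	if err := driver.Open(); err != nil {
//		// handle the connection error
//	}
//	defer driver.Close()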

// PostgresBuildQueryString builds a postgres connection string from the
// provided connection details, joining the non-empty parts with spaces.
func PostgresBuildQueryString(user, pass, dbname, host string, port int, sslmode string) string {
	parts := []string{}

	if len(user) != 0 {
		parts = append(parts, fmt.Sprintf("user=%s", user))
	}
	if len(pass) != 0 {
		parts = append(parts, fmt.Sprintf("password=%s", pass))
	}
	if len(dbname) != 0 {
		parts = append(parts, fmt.Sprintf("dbname=%s", dbname))
	}
	if len(host) != 0 {
		parts = append(parts, fmt.Sprintf("host=%s", host))
	}
	if port != 0 {
		parts = append(parts, fmt.Sprintf("port=%d", port))
	}
	if len(sslmode) != 0 {
		parts = append(parts, fmt.Sprintf("sslmode=%s", sslmode))
	}

	return strings.Join(parts, " ")
}
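
// For example (illustrative values):
//
//	PostgresBuildQueryString("user", "pass", "mydb", "localhost", 5432, "require")
//	// returns: "user=user password=pass dbname=mydb host=localhost port=5432 sslmode=require"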

// Open opens the database connection using the connection string
func (p *PostgresDriver) Open() error {
	var err error
	p.dbConn, err = sql.Open("postgres", p.connStr)
	if err != nil {
		return err
	}

	return nil
}

// Close closes the database connection
func (p *PostgresDriver) Close() {
	p.dbConn.Close()
}

// UseLastInsertID returns false for postgres
func (p *PostgresDriver) UseLastInsertID() bool {
	return false
}

// UseTopClause returns false to indicate PSQL doesn't support the SQL TOP clause
func (p *PostgresDriver) UseTopClause() bool {
	return false
}

// TableNames connects to the postgres database and
// retrieves all table names from the information_schema where the
// table schema is schema. It uses a whitelist and blacklist.
func (p *PostgresDriver) TableNames(schema string, whitelist, blacklist []string) ([]string, error) {
	var names []string

	query := `select table_name from information_schema.tables where table_schema = $1`
	args := []interface{}{schema}
	if len(whitelist) > 0 {
		query += fmt.Sprintf(" and table_name in (%s);", strmangle.Placeholders(true, len(whitelist), 2, 1))
		for _, w := range whitelist {
			args = append(args, w)
		}
	} else if len(blacklist) > 0 {
		query += fmt.Sprintf(" and table_name not in (%s);", strmangle.Placeholders(true, len(blacklist), 2, 1))
		for _, b := range blacklist {
			args = append(args, b)
		}
	}

	rows, err := p.dbConn.Query(query, args...)
	if err != nil {
		return nil, err
	}

	defer rows.Close()
	for rows.Next() {
		var name string
		if err := rows.Scan(&name); err != nil {
			return nil, err
		}
		names = append(names, name)
	}

	return names, nil
}
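
// Rough sketch of the SQL this builds for a two-entry whitelist (the whitelist
// placeholders start at $2 because $1 is taken by the schema; the exact
// formatting of the placeholder list comes from strmangle.Placeholders and may
// differ slightly):
//
//	select table_name from information_schema.tables where table_schema = $1 and table_name in ($2,$3);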

// Columns takes a table name and attempts to retrieve the table information
// from the database information_schema.columns. It retrieves the column names
// and column types and returns those as a []Column after TranslateColumnType()
// converts the SQL types to Go types, for example: "varchar" to "string"
func (p *PostgresDriver) Columns(schema, tableName string) ([]bdb.Column, error) {
	var columns []bdb.Column

	rows, err := p.dbConn.Query(`
	select
	c.column_name,
	(
		case when pgt.typtype = 'e'
		then
		(
			select 'enum.' || c.udt_name || '(''' || string_agg(labels.label, ''',''') || ''')'
			from (
				select pg_enum.enumlabel as label
				from pg_enum
				where pg_enum.enumtypid =
				(
					select typelem
					from pg_type
					where pg_type.typtype = 'b' and pg_type.typname = ('_' || c.udt_name)
					limit 1
				)
				order by pg_enum.enumsortorder
			) as labels
		)
		else c.data_type
		end
	) as column_type,

	c.udt_name,
	e.data_type as array_type,
	c.column_default,

	c.is_nullable = 'YES' as is_nullable,
	(select exists(
		select 1
		from information_schema.table_constraints tc
		inner join information_schema.constraint_column_usage as ccu on tc.constraint_name = ccu.constraint_name
		where tc.table_schema = $1 and tc.constraint_type = 'UNIQUE' and ccu.constraint_schema = $1 and ccu.table_name = c.table_name and ccu.column_name = c.column_name and
			(select count(*) from information_schema.constraint_column_usage where constraint_schema = $1 and constraint_name = tc.constraint_name) = 1
	)) OR
	(select exists(
		select 1
		from pg_indexes pgix
		inner join pg_class pgc on pgix.indexname = pgc.relname and pgc.relkind = 'i' and pgc.relnatts = 1
		inner join pg_index pgi on pgi.indexrelid = pgc.oid
		inner join pg_attribute pga on pga.attrelid = pgi.indrelid and pga.attnum = ANY(pgi.indkey)
		where
			pgix.schemaname = $1 and pgix.tablename = c.table_name and pga.attname = c.column_name and pgi.indisunique = true
	)) as is_unique

	from information_schema.columns as c
	inner join pg_namespace as pgn on pgn.nspname = c.udt_schema
	left join pg_type pgt on c.data_type = 'USER-DEFINED' and pgn.oid = pgt.typnamespace and c.udt_name = pgt.typname
	left join information_schema.element_types e
		on ((c.table_catalog, c.table_schema, c.table_name, 'TABLE', c.dtd_identifier)
		= (e.object_catalog, e.object_schema, e.object_name, e.object_type, e.collection_type_identifier))
	where c.table_name = $2 and c.table_schema = $1;
	`, schema, tableName)
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	for rows.Next() {
		var colName, colType, udtName string
		var defaultValue, arrayType *string
		var nullable, unique bool
		if err := rows.Scan(&colName, &colType, &udtName, &arrayType, &defaultValue, &nullable, &unique); err != nil {
			return nil, errors.Wrapf(err, "unable to scan for table %s", tableName)
		}

		column := bdb.Column{
			Name:     colName,
			DBType:   colType,
			ArrType:  arrayType,
			UDTName:  udtName,
			Nullable: nullable,
			Unique:   unique,
		}
		if defaultValue != nil {
			column.Default = *defaultValue
		}

		columns = append(columns, column)
	}

	return columns, nil
}
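
// As an illustration (hypothetical table, not queried here), a nullable
// varchar column named "title" would come back roughly as:
//
//	bdb.Column{Name: "title", DBType: "character varying", UDTName: "varchar", Nullable: true}
//
// TranslateColumnType() then turns DBType into the Go type used in templates.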

// PrimaryKeyInfo looks up the primary key for a table.
func (p *PostgresDriver) PrimaryKeyInfo(schema, tableName string) (*bdb.PrimaryKey, error) {
	pkey := &bdb.PrimaryKey{}
	var err error

	query := `
	select tc.constraint_name
	from information_schema.table_constraints as tc
	where tc.table_name = $1 and tc.constraint_type = 'PRIMARY KEY' and tc.table_schema = $2;`

	row := p.dbConn.QueryRow(query, tableName, schema)
	if err = row.Scan(&pkey.Name); err != nil {
		if err == sql.ErrNoRows {
			return nil, nil
		}
		return nil, err
	}

	queryColumns := `
	select kcu.column_name
	from information_schema.key_column_usage as kcu
	where constraint_name = $1 and table_schema = $2;`

	var rows *sql.Rows
	if rows, err = p.dbConn.Query(queryColumns, pkey.Name, schema); err != nil {
		return nil, err
	}
	defer rows.Close()

	var columns []string
	for rows.Next() {
		var column string

		err = rows.Scan(&column)
		if err != nil {
			return nil, err
		}

		columns = append(columns, column)
	}

	if err = rows.Err(); err != nil {
		return nil, err
	}

	pkey.Columns = columns

	return pkey, nil
}
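
// Shape of the result (illustrative values only): for a table whose primary
// key constraint is named "videos_pkey" over the "id" column, this returns
//
//	&bdb.PrimaryKey{Name: "videos_pkey", Columns: []string{"id"}}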

// UniqueKeyInfo is not implemented for the postgres driver; it always returns an error.
func (p *PostgresDriver) UniqueKeyInfo(schema, tableName string) ([]bdb.UniqueKey, error) {
	return []bdb.UniqueKey{}, errors.New("not implemented")
}

// ForeignKeyInfo retrieves the foreign keys for a given table name.
func (p *PostgresDriver) ForeignKeyInfo(schema, tableName string) ([]bdb.ForeignKey, error) {
	var fkeys []bdb.ForeignKey

	query := `
	select
		pgcon.conname,
		pgc.relname as source_table,
		pgasrc.attname as source_column,
		dstlookupname.relname as dest_table,
		pgadst.attname as dest_column
	from pg_namespace pgn
	inner join pg_class pgc on pgn.oid = pgc.relnamespace and pgc.relkind = 'r'
	inner join pg_constraint pgcon on pgn.oid = pgcon.connamespace and pgc.oid = pgcon.conrelid
	inner join pg_class dstlookupname on pgcon.confrelid = dstlookupname.oid
	inner join pg_attribute pgasrc on pgc.oid = pgasrc.attrelid and pgasrc.attnum = ANY(pgcon.conkey)
	inner join pg_attribute pgadst on pgcon.confrelid = pgadst.attrelid and pgadst.attnum = ANY(pgcon.confkey)
	where pgn.nspname = $2 and pgc.relname = $1 and pgcon.contype = 'f'`

	var rows *sql.Rows
	var err error
	if rows, err = p.dbConn.Query(query, tableName, schema); err != nil {
		return nil, err
	}

	for rows.Next() {
		var fkey bdb.ForeignKey
		var sourceTable string

		fkey.Table = tableName
		err = rows.Scan(&fkey.Name, &sourceTable, &fkey.Column, &fkey.ForeignTable, &fkey.ForeignColumn)
		if err != nil {
			return nil, err
		}

		fkeys = append(fkeys, fkey)
	}

	if err = rows.Err(); err != nil {
		return nil, err
	}

	return fkeys, nil
}
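
// Each row scanned above becomes one bdb.ForeignKey. For a hypothetical
// "videos.channel_id" column referencing "channels.id", the entry would be roughly:
//
//	bdb.ForeignKey{Table: "videos", Name: "videos_channel_id_fkey", Column: "channel_id", ForeignTable: "channels", ForeignColumn: "id"}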

// TranslateColumnType converts postgres database types to Go types, for example
// "varchar" to "string" and "bigint" to "int64". It returns this parsed data
// as a Column object.
func (p *PostgresDriver) TranslateColumnType(c bdb.Column) bdb.Column {
	if c.Nullable {
		switch c.DBType {
		case "bigint", "bigserial":
			c.Type = "null.Int64"
		case "integer", "serial":
			c.Type = "null.Int"
		case "smallint", "smallserial":
			c.Type = "null.Int16"
		case "decimal", "numeric", "double precision":
			c.Type = "null.Float64"
		case "real":
			c.Type = "null.Float32"
		case "bit", "interval", "bit varying", "character", "money", "character varying", "cidr", "inet", "macaddr", "text", "uuid", "xml":
			c.Type = "null.String"
		case `"char"`:
			c.Type = "null.Byte"
		case "bytea":
			c.Type = "null.Bytes"
		case "json", "jsonb":
			c.Type = "null.JSON"
		case "boolean":
			c.Type = "null.Bool"
		case "date", "time", "timestamp without time zone", "timestamp with time zone":
			c.Type = "null.Time"
		case "ARRAY":
			if c.ArrType == nil {
				panic("unable to get postgres ARRAY underlying type")
			}
			c.Type = getArrayType(c)
			// Make DBType something like ARRAYinteger for parsing with randomize.Struct
			c.DBType = c.DBType + *c.ArrType
		case "USER-DEFINED":
			if c.UDTName == "hstore" {
				c.Type = "types.HStore"
				c.DBType = "hstore"
			} else {
				c.Type = "string"
				fmt.Fprintf(os.Stderr, "Warning: Incompatible data type detected: %s\n", c.UDTName)
			}
		default:
			c.Type = "null.String"
		}
	} else {
		switch c.DBType {
		case "bigint", "bigserial":
			c.Type = "int64"
		case "integer", "serial":
			c.Type = "int"
		case "smallint", "smallserial":
			c.Type = "int16"
		case "decimal", "numeric", "double precision":
			c.Type = "float64"
		case "real":
			c.Type = "float32"
		case "bit", "interval", "uuint", "bit varying", "character", "money", "character varying", "cidr", "inet", "macaddr", "text", "uuid", "xml":
			c.Type = "string"
		case `"char"`:
			c.Type = "types.Byte"
		case "json", "jsonb":
			c.Type = "types.JSON"
		case "bytea":
			c.Type = "[]byte"
		case "boolean":
			c.Type = "bool"
		case "date", "time", "timestamp without time zone", "timestamp with time zone":
			c.Type = "time.Time"
		case "ARRAY":
			c.Type = getArrayType(c)
			// Make DBType something like ARRAYinteger for parsing with randomize.Struct
			c.DBType = c.DBType + *c.ArrType
		case "USER-DEFINED":
			if c.UDTName == "hstore" {
				c.Type = "types.HStore"
				c.DBType = "hstore"
			} else {
				c.Type = "string"
				fmt.Printf("Warning: Incompatible data type detected: %s\n", c.UDTName)
			}
		default:
			c.Type = "string"
		}
	}

	return c
}
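
// For instance (an illustrative sketch, not taken from a real schema), a
// nullable integer array column maps as follows:
//
//	intType := "integer"
//	in := bdb.Column{DBType: "ARRAY", ArrType: &intType, Nullable: true}
//	out := p.TranslateColumnType(in)
//	// out.Type == "types.Int64Array", out.DBType == "ARRAYinteger"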

// getArrayType returns the correct boil.Array type for each database type
func getArrayType(c bdb.Column) string {
	switch *c.ArrType {
	case "bigint", "bigserial", "integer", "serial", "smallint", "smallserial":
		return "types.Int64Array"
	case "bytea":
		return "types.BytesArray"
	case "bit", "interval", "uuint", "bit varying", "character", "money", "character varying", "cidr", "inet", "macaddr", "text", "uuid", "xml":
		return "types.StringArray"
	case "boolean":
		return "types.BoolArray"
	case "decimal", "numeric", "double precision", "real":
		return "types.Float64Array"
	default:
		return "types.StringArray"
	}
}

// RightQuote is the quoting character for the right side of the identifier
func (p *PostgresDriver) RightQuote() byte {
	return '"'
}

// LeftQuote is the quoting character for the left side of the identifier
func (p *PostgresDriver) LeftQuote() byte {
	return '"'
}

// IndexPlaceholders returns true to indicate PSQL supports indexed placeholders
func (p *PostgresDriver) IndexPlaceholders() bool {
	return true
}