Merge branch 'master' of github.com:vattle/sqlboiler
commit 8f0bfe8c9e
11 changed files with 197 additions and 86 deletions

README.md (27 changes)
@@ -14,22 +14,23 @@ or some other migration tool to manage this part of the database's life-cycle.

## Why another ORM

Whilst using the standard SQL library is efficient (cpu/mem wise), it can be cumbersome. We found ourselves remaking the
same SQL helpers and wrappers for every project we were creating, but did not want to deal with the existing ORM options out
there that utilize the "code-first" approach. `sqlx` is a great library, but very minimalistic and still requires a
considerable amount of boilerplate for every project. Originally this project started as a SQL boilerplate generator (hence the name)
that generated simple helper functions, but we found that we could accomplish the same task by turning it into a
(mostly) fully fledged ORM generator, without any sacrifice in performance or congruency, but generous gains in flexibility.
While attempting to migrate a legacy Rails database, we realized how much ActiveRecord benefitted us in terms of development velocity.
Coming over to the Go `database/sql` package after using ActiveRecord feels extremely repetitive, super long-winded and downright boring.
Being Go veterans we knew the state of ORMs was shaky, and after a quick review we found our fears confirmed. Most packages out
there are code-first, reflect-based and have a very weak story around relationships between models. So with that we set out with these goals:

The approach we've taken has afforded us the following benefits:
* Work with existing databases: Don't be the tool to define the schema, that's better left to other tools.
* ActiveRecord-like productivity: Eliminate all sql boilerplate, have relationships as a first-class concept.
* Go-like feel: Work with normal structs, call functions, no hyper-magical struct tags, small interfaces.
* Go-like performance: Benchmark and optimize the hot-paths, perform like hand-rolled `sql.DB` code.

* Thorough relationship story. No unnecessary struct tags, no unnecessary configuration.
* High performance and memory efficiency by minimizing run-time reflection.
* The models package is type safe. This means no chance of random panics due to passing in the wrong type. No need for any type assertions.
We believe with SQLBoiler and our database-first code-generation approach we've been able to successfully meet all of these goals. On top
of that SQLBoiler also confers the following benefits:

* The models package is type safe. This means no chance of random panics due to passing in the wrong type. No need for interface{}.
* Our types closely correlate to your database column types. This is expanded by our extended null package which supports nearly all Go data types.
* Extensive auto-completion provides work-flow efficiency gains.
* A system that is easier to debug. Your ORM is tailored to your schema, the code paths are easy to trace and generally very lucid.
* An API you would write for yourself (we hope), that is compatible with most any database schema.
* A system that is easy to debug. Your ORM is tailored to your schema, the code paths should be easy to trace since it's not all buried in reflect.
* Auto-completion provides work-flow efficiency gains.

Table of Contents
=================
@@ -9,18 +9,13 @@ import (

// NonZeroDefaultSet returns the fields included in the
// defaults slice that are non zero values
func NonZeroDefaultSet(defaults []string, titleCases map[string]string, obj interface{}) []string {
func NonZeroDefaultSet(defaults []string, obj interface{}) []string {
c := make([]string, 0, len(defaults))

val := reflect.Indirect(reflect.ValueOf(obj))

for _, d := range defaults {
var fieldName string
if titleCases == nil {
fieldName = strmangle.TitleCase(d)
} else {
fieldName = titleCases[d]
}
fieldName := strmangle.TitleCase(d)
field := val.FieldByName(fieldName)
if !field.IsValid() {
panic(fmt.Sprintf("Could not find field name %s in type %T", fieldName, obj))
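The hunk above drops the pre-computed titleCases map and always derives the field name with strmangle.TitleCase. The rest of the function is cut off by the hunk, but the doc comment describes the intent: keep only the default-valued columns whose struct fields are already set. Below is a minimal self-contained sketch of that idea; the naive titleCase helper, the pilot struct, and the reflect.Zero comparison are stand-ins and assumptions, not the library's code.

```go
package main

import (
	"fmt"
	"reflect"
	"strings"
)

// titleCase is a stand-in for strmangle.TitleCase; it only upper-cases
// snake_case segments and does not handle initialisms like "id" -> "ID".
func titleCase(s string) string {
	parts := strings.Split(s, "_")
	for i, p := range parts {
		if p != "" {
			parts[i] = strings.ToUpper(p[:1]) + p[1:]
		}
	}
	return strings.Join(parts, "")
}

// nonZeroDefaultSet keeps the columns from defaults whose corresponding struct
// field holds a non-zero value, mirroring the comment on NonZeroDefaultSet.
func nonZeroDefaultSet(defaults []string, obj interface{}) []string {
	c := make([]string, 0, len(defaults))
	val := reflect.Indirect(reflect.ValueOf(obj))

	for _, d := range defaults {
		fieldName := titleCase(d)
		field := val.FieldByName(fieldName)
		if !field.IsValid() {
			panic(fmt.Sprintf("could not find field name %s in type %T", fieldName, obj))
		}
		// Assumption: "non zero" means not equal to the field type's zero value.
		zero := reflect.Zero(field.Type())
		if !reflect.DeepEqual(field.Interface(), zero.Interface()) {
			c = append(c, d)
		}
	}
	return c
}

type pilot struct {
	Age  int
	Name string
}

func main() {
	// Only "age" is kept: Name still holds the zero value "".
	fmt.Println(nonZeroDefaultSet([]string{"age", "name"}, pilot{Age: 3}))
}
```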
@@ -59,7 +59,7 @@ func TestNonZeroDefaultSet(t *testing.T) {
}

for i, test := range tests {
z := NonZeroDefaultSet(test.Defaults, nil, test.Obj)
z := NonZeroDefaultSet(test.Defaults, test.Obj)
if !reflect.DeepEqual(test.Ret, z) {
t.Errorf("[%d] mismatch:\nWant: %#v\nGot: %#v", i, test.Ret, z)
}
@@ -16,6 +16,7 @@ var (

mut sync.RWMutex
bindingMaps = make(map[string][]uint64)
structMaps = make(map[string]map[string]uint64)
)

// Identifies what kind of object we're binding to
@@ -192,21 +193,33 @@ func bind(rows *sql.Rows, obj interface{}, structType, sliceType reflect.Type, b
ptrSlice = reflect.Indirect(reflect.ValueOf(obj))
}

var strMapping map[string]uint64
var sok bool
var mapping []uint64
var ok bool

mapKey := makeCacheKey(structType.String(), cols)
typStr := structType.String()

mapKey := makeCacheKey(typStr, cols)
mut.RLock()
mapping, ok = bindingMaps[mapKey]
if !ok {
if strMapping, sok = structMaps[typStr]; !sok {
strMapping = MakeStructMapping(structType)
}
}
mut.RUnlock()

if !ok {
mapping, err = bindMapping(structType, cols)
mapping, err = BindMapping(structType, strMapping, cols)
if err != nil {
return err
}

mut.Lock()
if !sok {
structMaps[typStr] = strMapping
}
bindingMaps[mapKey] = mapping
mut.Unlock()
}
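The caching in `bind` follows a common read-mostly pattern: look the mapping up under the read lock, build whatever is missing, and take the write lock only to publish the result. The sketch below shows that pattern in a generic, self-contained form; the cache shape and the `compute` callback are illustrative, not sqlboiler's actual types.

```go
package main

import (
	"fmt"
	"sync"
)

// mappingCache caches expensive per-key computations behind a sync.RWMutex,
// the same shape as bindingMaps/structMaps in the hunk above.
type mappingCache struct {
	mut   sync.RWMutex
	items map[string][]uint64
}

func newMappingCache() *mappingCache {
	return &mappingCache{items: make(map[string][]uint64)}
}

// get returns the cached value for key, computing and storing it on a miss.
// The computation runs outside the write lock, so two goroutines may both
// compute the same value on a cold cache; since the result is deterministic,
// the duplicated work is wasted but harmless.
func (c *mappingCache) get(key string, compute func() []uint64) []uint64 {
	c.mut.RLock()
	v, ok := c.items[key]
	c.mut.RUnlock()
	if ok {
		return v
	}

	v = compute()

	c.mut.Lock()
	c.items[key] = v
	c.mut.Unlock()
	return v
}

func main() {
	cache := newMappingCache()
	m := cache.get("pilots.id,name", func() []uint64 {
		fmt.Println("computing mapping (cache miss)")
		return []uint64{0, 1}
	})
	fmt.Println(m)
	// Second call hits the cache; compute is not invoked again.
	fmt.Println(cache.get("pilots.id,name", func() []uint64 { return nil }))
}
```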
@@ -224,12 +237,12 @@ func bind(rows *sql.Rows, obj interface{}, structType, sliceType reflect.Type, b

switch bkind {
case kindStruct:
pointers = ptrsFromMapping(reflect.Indirect(reflect.ValueOf(obj)), mapping)
pointers = PtrsFromMapping(reflect.Indirect(reflect.ValueOf(obj)), mapping)
case kindSliceStruct:
pointers = ptrsFromMapping(oneStruct, mapping)
pointers = PtrsFromMapping(oneStruct, mapping)
case kindPtrSliceStruct:
newStruct = reflect.New(structType)
pointers = ptrsFromMapping(reflect.Indirect(newStruct), mapping)
pointers = PtrsFromMapping(reflect.Indirect(newStruct), mapping)
}
if err != nil {
return err
@@ -254,11 +267,10 @@ func bind(rows *sql.Rows, obj interface{}, structType, sliceType reflect.Type, b
return nil
}

// bindMapping creates a mapping that helps look up the pointer for the
// BindMapping creates a mapping that helps look up the pointer for the
// column given.
func bindMapping(typ reflect.Type, cols []string) ([]uint64, error) {
func BindMapping(typ reflect.Type, mapping map[string]uint64, cols []string) ([]uint64, error) {
ptrs := make([]uint64, len(cols))
mapping := makeStructMapping(typ)

ColLoop:
for i, c := range cols {
@@ -283,19 +295,29 @@ ColLoop:
return ptrs, nil
}

// ptrsFromMapping expects to be passed an addressable struct that it's looking
// for things on.
func ptrsFromMapping(val reflect.Value, mapping []uint64) []interface{} {
// PtrsFromMapping expects to be passed an addressable struct and a mapping
// of where to find things. It pulls the pointers out referred to by the mapping.
func PtrsFromMapping(val reflect.Value, mapping []uint64) []interface{} {
ptrs := make([]interface{}, len(mapping))
for i, m := range mapping {
ptrs[i] = ptrFromMapping(val, m).Interface()
ptrs[i] = ptrFromMapping(val, m, true).Interface()
}
return ptrs
}

// ValuesFromMapping expects to be passed an addressable struct and a mapping
// of where to find things. It pulls the pointers out referred to by the mapping.
func ValuesFromMapping(val reflect.Value, mapping []uint64) []interface{} {
ptrs := make([]interface{}, len(mapping))
for i, m := range mapping {
ptrs[i] = ptrFromMapping(val, m, false).Interface()
}
return ptrs
}

// ptrFromMapping expects to be passed an addressable struct that it's looking
// for things on.
func ptrFromMapping(val reflect.Value, mapping uint64) reflect.Value {
func ptrFromMapping(val reflect.Value, mapping uint64, addressOf bool) reflect.Value {
for i := 0; i < 8; i++ {
v := (mapping >> uint(i*8)) & sentinel
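The mapping values here pack a path of struct field indices into a single uint64, which ptrFromMapping decodes eight bits at a time in its shift loop. The sketch below illustrates that byte-per-level idea in a runnable form; the packing scheme (index plus one, with a zero byte as terminator) and the 0xFF mask are assumptions for illustration, since the library's sentinel constant and MakeStructMapping internals are not part of this diff.

```go
package main

import (
	"fmt"
	"reflect"
)

// packPath packs up to eight field indices into a uint64, one byte per level.
// Indices are stored as idx+1 so that a zero byte marks the end of the path.
func packPath(indices ...int) uint64 {
	var m uint64
	for i, idx := range indices {
		m |= uint64(idx+1) << uint(i*8)
	}
	return m
}

// fieldFromPath walks the packed path into val, descending into nested
// structs, and returns either the field's address or its value.
func fieldFromPath(val reflect.Value, path uint64, addressOf bool) reflect.Value {
	for i := 0; i < 8; i++ {
		b := (path >> uint(i*8)) & 0xFF
		if b == 0 {
			break
		}
		val = val.Field(int(b - 1))
	}
	if addressOf {
		return val.Addr()
	}
	return val
}

type inner struct{ X, Y int }

type outer struct {
	ID     int
	Name   string
	Nested inner
}

func main() {
	o := outer{ID: 1, Name: "a", Nested: inner{X: 6}}
	v := reflect.Indirect(reflect.ValueOf(&o))

	// Nested.X is field 2 of outer, then field 0 of inner.
	p := fieldFromPath(v, packPath(2, 0), true).Interface().(*int)
	fmt.Println(*p) // 6

	// The same path pulled out as a plain value rather than a pointer.
	fmt.Println(fieldFromPath(v, packPath(2, 0), false).Interface()) // 6
}
```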
@@ -315,7 +337,9 @@ func ptrFromMapping(val reflect.Value, mapping uint64) reflect.Value {
panic("could not find pointer from mapping")
}

func makeStructMapping(typ reflect.Type) map[string]uint64 {
// MakeStructMapping creates a map of the struct to be able to quickly look
// up its pointers and values by name.
func MakeStructMapping(typ reflect.Type) map[string]uint64 {
fieldMaps := make(map[string]uint64)
makeStructMappingHelper(typ, "", 0, 0, fieldMaps)
return fieldMaps
@@ -204,7 +204,7 @@ func TestMakeStructMapping(t *testing.T) {
} `boil:",bind"`
}{}

got := makeStructMapping(reflect.TypeOf(testStruct))
got := MakeStructMapping(reflect.TypeOf(testStruct))

expectMap := map[string]uint64{
"Different": testMakeMapping(0),
@@ -247,19 +247,19 @@ func TestPtrFromMapping(t *testing.T) {
},
}

v := ptrFromMapping(reflect.Indirect(reflect.ValueOf(val)), testMakeMapping(0))
v := ptrFromMapping(reflect.Indirect(reflect.ValueOf(val)), testMakeMapping(0), true)
if got := *v.Interface().(*int); got != 5 {
t.Error("flat int was wrong:", got)
}
v = ptrFromMapping(reflect.Indirect(reflect.ValueOf(val)), testMakeMapping(1))
v = ptrFromMapping(reflect.Indirect(reflect.ValueOf(val)), testMakeMapping(1), true)
if got := *v.Interface().(*int); got != 0 {
t.Error("flat pointer was wrong:", got)
}
v = ptrFromMapping(reflect.Indirect(reflect.ValueOf(val)), testMakeMapping(2, 0))
v = ptrFromMapping(reflect.Indirect(reflect.ValueOf(val)), testMakeMapping(2, 0), true)
if got := *v.Interface().(*int); got != 6 {
t.Error("nested int was wrong:", got)
}
v = ptrFromMapping(reflect.Indirect(reflect.ValueOf(val)), testMakeMapping(2, 1))
v = ptrFromMapping(reflect.Indirect(reflect.ValueOf(val)), testMakeMapping(2, 1), true)
if got := *v.Interface().(*int); got != 0 {
t.Error("nested pointer was wrong:", got)
}
@@ -146,6 +146,8 @@ var defaultTemplateImports = imports{
`"fmt"`,
`"strings"`,
`"database/sql"`,
`"reflect"`,
`"sync"`,
`"time"`,
},
thirdParty: importList{
@@ -166,6 +168,7 @@ var defaultSingletonTemplateImports = map[string]imports{
"boil_types": {
thirdParty: importList{
`"github.com/pkg/errors"`,
`"github.com/vattle/sqlboiler/strmangle"`,
},
},
}
@@ -1,3 +1,5 @@
{{if .Table.IsJoinTable -}}
{{else -}}
{{- $varNameSingular := .Table.Name | singular | camelCase -}}
{{- $tableNameSingular := .Table.Name | singular | titleCase -}}
var (
@@ -5,11 +7,6 @@ var (
{{$varNameSingular}}ColumnsWithoutDefault = []string{{"{"}}{{.Table.Columns | filterColumnsByDefault false | columnNames | stringMap .StringFuncs.quoteWrap | join ","}}{{"}"}}
{{$varNameSingular}}ColumnsWithDefault = []string{{"{"}}{{.Table.Columns | filterColumnsByDefault true | columnNames | stringMap .StringFuncs.quoteWrap | join ","}}{{"}"}}
{{$varNameSingular}}PrimaryKeyColumns = []string{{"{"}}{{.Table.PKey.Columns | stringMap .StringFuncs.quoteWrap | join ", "}}{{"}"}}
{{$varNameSingular}}TitleCases = map[string]string{
{{range $col := .Table.Columns | columnNames -}}
"{{$col}}": "{{titleCase $col}}",
{{end -}}
}
)

type (
@@ -23,5 +20,16 @@ type (
}
)

// Cache for insert and update
var (
{{$varNameSingular}}Type = reflect.TypeOf(&{{$tableNameSingular}}{})
{{$varNameSingular}}Mapping = boil.MakeStructMapping({{$varNameSingular}}Type)
{{$varNameSingular}}InsertCacheMut sync.RWMutex
{{$varNameSingular}}InsertCache = make(map[string]insertCache)
{{$varNameSingular}}UpdateCacheMut sync.RWMutex
{{$varNameSingular}}UpdateCache = make(map[string]updateCache)
)

// Force time package dependency for automated UpdatedAt/CreatedAt.
var _ = time.Second
{{end -}}
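To make the template block above concrete, here is a small runnable toy that renders a similar cache-variable block through Go's text/template for a hypothetical pilots table. The singular/camelCase/titleCase funcs are simplified stand-ins for sqlboiler's real string-mangling helpers, and the rendered snippet is only illustrative output, not compilable generated code on its own.

```go
package main

import (
	"os"
	"strings"
	"text/template"
)

// Stand-ins for the generator's template funcs; they only handle the simple
// "pilots" example used below.
var funcs = template.FuncMap{
	"singular":  func(s string) string { return strings.TrimSuffix(s, "s") },
	"camelCase": func(s string) string { return strings.ToLower(s[:1]) + s[1:] },
	"titleCase": func(s string) string { return strings.ToUpper(s[:1]) + s[1:] },
}

const tpl = `{{- $varNameSingular := .Table.Name | singular | camelCase -}}
{{- $tableNameSingular := .Table.Name | singular | titleCase -}}
// Cache for insert and update
var (
	{{$varNameSingular}}Type    = reflect.TypeOf(&{{$tableNameSingular}}{})
	{{$varNameSingular}}Mapping = boil.MakeStructMapping({{$varNameSingular}}Type)

	{{$varNameSingular}}InsertCacheMut sync.RWMutex
	{{$varNameSingular}}InsertCache    = make(map[string]insertCache)
	{{$varNameSingular}}UpdateCacheMut sync.RWMutex
	{{$varNameSingular}}UpdateCache    = make(map[string]updateCache)
)
`

type table struct{ Name string }

func main() {
	t := template.Must(template.New("vars").Funcs(funcs).Parse(tpl))
	// Renders the cache var block for a hypothetical "pilots" table.
	if err := t.Execute(os.Stdout, map[string]table{"Table": {Name: "pilots"}}); err != nil {
		panic(err)
	}
}
```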
@@ -40,58 +40,86 @@ func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string
}
{{- end}}

wl, returnColumns := strmangle.InsertColumnSet(
{{$varNameSingular}}Columns,
{{$varNameSingular}}ColumnsWithDefault,
{{$varNameSingular}}ColumnsWithoutDefault,
boil.NonZeroDefaultSet({{$varNameSingular}}ColumnsWithDefault, {{$varNameSingular}}TitleCases, o),
whitelist,
)
nzDefaults := boil.NonZeroDefaultSet({{$varNameSingular}}ColumnsWithDefault, o)

ins := fmt.Sprintf(`INSERT INTO {{.Table.Name}} ("%s") VALUES (%s)`, strings.Join(wl, `","`), strmangle.Placeholders(len(wl), 1, 1))
key := makeCacheKey(whitelist, nzDefaults)
{{$varNameSingular}}InsertCacheMut.RLock()
cache, cached := {{$varNameSingular}}InsertCache[key]
{{$varNameSingular}}InsertCacheMut.RUnlock()

{{if .UseLastInsertID}}
if boil.DebugMode {
fmt.Fprintln(boil.DebugWriter, ins)
fmt.Fprintln(boil.DebugWriter, boil.GetStructValues(o, wl...))
if !cached {
wl, returnColumns := strmangle.InsertColumnSet(
{{$varNameSingular}}Columns,
{{$varNameSingular}}ColumnsWithDefault,
{{$varNameSingular}}ColumnsWithoutDefault,
nzDefaults,
whitelist,
)

cache.valueMapping, err = boil.BindMapping({{$varNameSingular}}Type, {{$varNameSingular}}Mapping, wl)
if err != nil {
return err
}
cache.retMapping, err = boil.BindMapping({{$varNameSingular}}Type, {{$varNameSingular}}Mapping, returnColumns)
if err != nil {
return err
}
cache.query = fmt.Sprintf(`INSERT INTO {{.Table.Name}} ("%s") VALUES (%s)`, strings.Join(wl, `","`), strmangle.Placeholders(len(wl), 1, 1))

if len(cache.retMapping) != 0 {
{{if .UseLastInsertID -}}
cache.retQuery = fmt.Sprintf(`SELECT %s FROM {{.Table.Name}} WHERE %s`, strings.Join(returnColumns, `","`), strmangle.WhereClause(1, {{$varNameSingular}}PrimaryKeyColumns))
{{else -}}
cache.query += fmt.Sprintf(` RETURNING %s`, strings.Join(returnColumns, ","))
{{end -}}
}
}

result, err := exec.Exec(ins, boil.GetStructValues(o, wl...)...)
value := reflect.Indirect(reflect.ValueOf(o))
vals := boil.ValuesFromMapping(value, cache.valueMapping)
{{if .UseLastInsertID}}
if boil.DebugMode {
fmt.Fprintln(boil.DebugWriter, cache.query)
fmt.Fprintln(boil.DebugWriter, vals)
}

result, err := exec.Exec(ins, vals...)
if err != nil {
return errors.Wrap(err, "{{.PkgName}}: unable to insert into {{.Table.Name}}")
}

if len(cache.retMapping) == 0 {
{{if not .NoHooks -}}
if len(returnColumns) == 0 {
return o.doAfterInsertHooks(exec)
}
{{- else -}}
if len(returnColumns) == 0 {
return o.doAfterInsertHooks(exec)
{{else -}}
return nil
{{end -}}
}
{{- end}}

lastID, err := result.LastInsertId()
if err != nil || lastID == 0 || len({{$varNameSingular}}AutoIncPrimaryKeys) != 1 {
if err != nil || lastID == 0 || len({{$varNameSingular}}PrimaryKeyColumns) != 1 {
return ErrSyncFail
}

sel := fmt.Sprintf(`SELECT %s FROM {{.Table.Name}} WHERE %s`, strings.Join(returnColumns, `","`), strmangle.WhereClause(1, {{$varNameSingular}}AutoIncPrimaryKeys))
err = exec.QueryRow(sel, lastID).Scan(boil.GetStructPointers(o, returnColumns...))
if boil.DebugMode {
fmt.Fprintln(boil.DebugWriter, cache.retQuery)
fmt.Fprintln(boil.DebugWriter, lastID)
}

err = exec.QueryRow(cache.retQuery, lastID).Scan(boil.PtrsFromMapping(value, cache.retMapping)...)
if err != nil {
return errors.Wrap(err, "{{.PkgName}}: unable to populate default values for {{.Table.Name}}")
}
{{else}}
if len(returnColumns) != 0 {
ins = ins + fmt.Sprintf(` RETURNING %s`, strings.Join(returnColumns, ","))
err = exec.QueryRow(ins, boil.GetStructValues(o, wl...)...).Scan(boil.GetStructPointers(o, returnColumns...)...)
if len(cache.retMapping) != 0 {
err = exec.QueryRow(cache.query, vals...).Scan(boil.PtrsFromMapping(value, cache.retMapping)...)
} else {
_, err = exec.Exec(ins, boil.GetStructValues(o, wl...)...)
_, err = exec.Exec(cache.query, vals...)
}

if boil.DebugMode {
fmt.Fprintln(boil.DebugWriter, ins)
fmt.Fprintln(boil.DebugWriter, boil.GetStructValues(o, wl...))
fmt.Fprintln(boil.DebugWriter, cache.query)
fmt.Fprintln(boil.DebugWriter, vals)
}

if err != nil {
@@ -99,6 +127,12 @@ func (o *{{$tableNameSingular}}) Insert(exec boil.Executor, whitelist ... string
}
{{end}}

if !cached {
{{$varNameSingular}}InsertCacheMut.Lock()
{{$varNameSingular}}InsertCache[key] = cache
{{$varNameSingular}}InsertCacheMut.Unlock()
}

{{if not .NoHooks -}}
return o.doAfterInsertHooks(exec)
{{- else -}}
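The Insert hunks cache both the column mappings and the rendered SQL string per (whitelist, non-zero defaults) pair. To show the shape of the cached query, here is a small runnable sketch that assembles the same INSERT ... RETURNING string for a hypothetical pilots table; the placeholders helper is a simplified stand-in for strmangle.Placeholders, and the column names are invented for the example.

```go
package main

import (
	"fmt"
	"strings"
)

// placeholders is a stand-in for strmangle.Placeholders(count, start, group):
// it emits count PostgreSQL-style placeholders starting at $start.
func placeholders(count, start int) string {
	ps := make([]string, count)
	for i := range ps {
		ps[i] = fmt.Sprintf("$%d", start+i)
	}
	return strings.Join(ps, ",")
}

func main() {
	// Hypothetical "pilots" table: wl is the columns being written,
	// returnColumns are the DB-defaulted columns to read back.
	wl := []string{"name", "age"}
	returnColumns := []string{"id", "created_at"}

	// Same shape as cache.query in the template above.
	query := fmt.Sprintf(`INSERT INTO pilots ("%s") VALUES (%s)`,
		strings.Join(wl, `","`), placeholders(len(wl), 1))
	if len(returnColumns) != 0 {
		query += fmt.Sprintf(` RETURNING %s`, strings.Join(returnColumns, ","))
	}

	fmt.Println(query)
	// INSERT INTO pilots ("name","age") VALUES ($1,$2) RETURNING id,created_at
}
```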
@@ -37,31 +37,40 @@ func (o *{{$tableNameSingular}}) UpdateP(exec boil.Executor, whitelist ... strin
func (o *{{$tableNameSingular}}) Update(exec boil.Executor, whitelist ... string) error {
{{- template "timestamp_update_helper" . -}}

var err error
{{if not .NoHooks -}}
if err := o.doBeforeUpdateHooks(exec); err != nil {
if err = o.doBeforeUpdateHooks(exec); err != nil {
return err
}
{{- end}}
{{end -}}

var err error
var query string
var values []interface{}
key := makeCacheKey(whitelist, nil)
{{$varNameSingular}}UpdateCacheMut.RLock()
cache, cached := {{$varNameSingular}}UpdateCache[key]
{{$varNameSingular}}UpdateCacheMut.RUnlock()

wl := strmangle.UpdateColumnSet({{$varNameSingular}}Columns, {{$varNameSingular}}PrimaryKeyColumns, whitelist)
if len(wl) == 0 {
if !cached {
wl := strmangle.UpdateColumnSet({{$varNameSingular}}Columns, {{$varNameSingular}}PrimaryKeyColumns, whitelist)

cache.query = fmt.Sprintf(`UPDATE "{{.Table.Name}}" SET %s WHERE %s`, strmangle.SetParamNames(wl), strmangle.WhereClause(len(wl)+1, {{$varNameSingular}}PrimaryKeyColumns))
cache.valueMapping, err = boil.BindMapping({{$varNameSingular}}Type, {{$varNameSingular}}Mapping, append(wl, {{$varNameSingular}}PrimaryKeyColumns...))
if err != nil {
return err
}
}

if len(cache.valueMapping) == 0 {
return errors.New("{{.PkgName}}: unable to update {{.Table.Name}}, could not build whitelist")
}

query = fmt.Sprintf(`UPDATE {{.Table.Name}} SET %s WHERE %s`, strmangle.SetParamNames(wl), strmangle.WhereClause(len(wl)+1, {{$varNameSingular}}PrimaryKeyColumns))
values = boil.GetStructValues(o, wl...)
values = append(values, {{.Table.PKey.Columns | stringMap .StringFuncs.titleCase | prefixStringSlice "o." | join ", "}})
values := boil.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping)

if boil.DebugMode {
fmt.Fprintln(boil.DebugWriter, query)
fmt.Fprintln(boil.DebugWriter, cache.query)
fmt.Fprintln(boil.DebugWriter, values)
}

result, err := exec.Exec(query, values...)
result, err := exec.Exec(cache.query, values...)
if err != nil {
return errors.Wrap(err, "{{.PkgName}}: unable to update {{.Table.Name}} row")
}
@@ -70,6 +79,12 @@ func (o *{{$tableNameSingular}}) Update(exec boil.Executor, whitelist ... string
return errors.Errorf("failed to update single row, updated %d rows", r)
}

if !cached {
{{$varNameSingular}}UpdateCacheMut.Lock()
{{$varNameSingular}}UpdateCache[key] = cache
{{$varNameSingular}}UpdateCacheMut.Unlock()
}

{{if not .NoHooks -}}
return o.doAfterUpdateHooks(exec)
{{- else -}}
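In the Update hunks the cached query numbers its SET placeholders first and its WHERE placeholders afterwards (WhereClause starts at len(wl)+1), and the value mapping is built from the whitelist with the primary key columns appended so the bound values arrive in that same order. Below is a runnable sketch of that numbering; setParamNames and whereClause are simplified stand-ins for the strmangle helpers, and the pilots columns are invented for the example.

```go
package main

import (
	"fmt"
	"strings"
)

// setParamNames and whereClause only demonstrate the placeholder numbering
// the Update template relies on; they are not the library's implementations.
func setParamNames(cols []string) string {
	parts := make([]string, len(cols))
	for i, c := range cols {
		parts[i] = fmt.Sprintf(`"%s"=$%d`, c, i+1)
	}
	return strings.Join(parts, ",")
}

func whereClause(start int, cols []string) string {
	parts := make([]string, len(cols))
	for i, c := range cols {
		parts[i] = fmt.Sprintf(`"%s"=$%d`, c, start+i)
	}
	return strings.Join(parts, " AND ")
}

func main() {
	// Hypothetical pilots table: two updatable columns and one primary key.
	wl := []string{"name", "age"}
	pk := []string{"id"}

	// SET uses $1..$len(wl); WHERE starts at len(wl)+1, which is why the value
	// mapping is built from append(wl, primaryKeyColumns...): ValuesFromMapping
	// then yields the values in exactly that order.
	query := fmt.Sprintf(`UPDATE "pilots" SET %s WHERE %s`,
		setParamNames(wl), whereClause(len(wl)+1, pk))

	fmt.Println(query)
	// UPDATE "pilots" SET "name"=$1,"age"=$2 WHERE "id"=$3
}
```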
@@ -40,7 +40,7 @@ func (o *{{$tableNameSingular}}) Upsert(exec boil.Executor, updateOnConflict boo
{{$varNameSingular}}Columns,
{{$varNameSingular}}ColumnsWithDefault,
{{$varNameSingular}}ColumnsWithoutDefault,
boil.NonZeroDefaultSet({{$varNameSingular}}ColumnsWithDefault, {{$varNameSingular}}TitleCases, o),
boil.NonZeroDefaultSet({{$varNameSingular}}ColumnsWithDefault, o),
whitelist,
)
update := strmangle.UpdateColumnSet(
@@ -5,3 +5,34 @@ type M map[string]interface{}
// order to populate default value information. This usually happens when LastInsertId
// fails or there was a primary key configuration that was not resolvable.
var ErrSyncFail = errors.New("{{.PkgName}}: failed to synchronize data after insert")

type insertCache struct{
query string
retQuery string
valueMapping []uint64
retMapping []uint64
}

type updateCache struct{
query string
valueMapping []uint64
}

func makeCacheKey(wl, nzDefaults []string) string {
buf := strmangle.GetBuffer()

for _, w := range wl {
buf.WriteString(w)
}
if len(nzDefaults) != 0 {
buf.WriteByte('.')
}
for _, nz := range nzDefaults {
buf.WriteString(nz)
}

str := buf.String()
strmangle.PutBuffer(buf)
return str
}
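The cache key above is plain concatenation: the whitelist columns joined together, then a '.' and the non-zero default columns when there are any. Here is a standalone version of the same logic using a plain bytes.Buffer in place of strmangle's buffer pool, with the example columns invented for the demonstration.

```go
package main

import (
	"bytes"
	"fmt"
)

// makeCacheKey, rewritten standalone with bytes.Buffer instead of the
// strmangle.GetBuffer/PutBuffer pool used by the generated code.
func makeCacheKey(wl, nzDefaults []string) string {
	var buf bytes.Buffer

	for _, w := range wl {
		buf.WriteString(w)
	}
	if len(nzDefaults) != 0 {
		buf.WriteByte('.')
	}
	for _, nz := range nzDefaults {
		buf.WriteString(nz)
	}

	return buf.String()
}

func main() {
	// Insert: the key depends on both the whitelist and the non-zero defaults.
	fmt.Println(makeCacheKey([]string{"name", "age"}, []string{"id"})) // nameage.id
	// Update: the template passes nil defaults, so the key is the whitelist only.
	fmt.Println(makeCacheKey([]string{"name", "age"}, nil)) // nameage
}
```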