
Commit 766d4e4

Author: Nick Randall (authored and committed)
allow keys to be empty interfaces rather than strings
1 parent beedcd5 commit 766d4e4

10 files changed: +103 additions, -102 deletions

.travis.yml

Lines changed: 4 additions & 3 deletions
@@ -1,10 +1,11 @@
 language: go
 
 go:
-  - 1.7
+  - 1.x
 
-before_install:
-  - go get -t -v ./...
+install:
+  - go get -u github.com/golang/dep/...
+  - dep ensure
 
 script:
   - go test -v -race -coverprofile=coverage.txt -covermode=atomic

cache.go

Lines changed: 6 additions & 6 deletions
@@ -4,9 +4,9 @@ import "context"
 
 // The Cache interface. If a custom cache is provided, it must implement this interface.
 type Cache interface {
-	Get(context.Context, string) (Thunk, bool)
-	Set(context.Context, string, Thunk)
-	Delete(context.Context, string) bool
+	Get(context.Context, interface{}) (Thunk, bool)
+	Set(context.Context, interface{}, Thunk)
+	Delete(context.Context, interface{}) bool
 	Clear()
 }
 
@@ -16,13 +16,13 @@ type Cache interface {
 type NoCache struct{}
 
 // Get is a NOOP
-func (c *NoCache) Get(context.Context, string) (Thunk, bool) { return nil, false }
+func (c *NoCache) Get(context.Context, interface{}) (Thunk, bool) { return nil, false }
 
 // Set is a NOOP
-func (c *NoCache) Set(context.Context, string, Thunk) { return }
+func (c *NoCache) Set(context.Context, interface{}, Thunk) { return }
 
 // Delete is a NOOP
-func (c *NoCache) Delete(context.Context, string) bool { return false }
+func (c *NoCache) Delete(context.Context, interface{}) bool { return false }
 
 // Clear is a NOOP
 func (c *NoCache) Clear() { return }
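
With the key type widened from string to interface{}, a caller-supplied cache can now index thunks by any comparable Go value. The snippet below is a minimal sketch of such a cache written against the updated interface; the import path (github.com/nicksrandall/dataloader) and the package/type names are assumptions for illustration, not part of this commit.

package cachesketch

import (
	"context"
	"sync"

	dataloader "github.com/nicksrandall/dataloader" // assumed import path
)

// mapCache stores thunks keyed by any comparable value, not just strings.
type mapCache struct {
	mu    sync.Mutex
	items map[interface{}]dataloader.Thunk
}

// compile-time check that mapCache satisfies the updated Cache interface.
var _ dataloader.Cache = (*mapCache)(nil)

func newMapCache() *mapCache {
	return &mapCache{items: map[interface{}]dataloader.Thunk{}}
}

// Get returns the cached thunk for key, if present.
func (c *mapCache) Get(_ context.Context, key interface{}) (dataloader.Thunk, bool) {
	c.mu.Lock()
	defer c.mu.Unlock()
	t, ok := c.items[key]
	return t, ok
}

// Set stores a thunk under key.
func (c *mapCache) Set(_ context.Context, key interface{}, thunk dataloader.Thunk) {
	c.mu.Lock()
	defer c.mu.Unlock()
	c.items[key] = thunk
}

// Delete removes key and reports whether it was present.
func (c *mapCache) Delete(_ context.Context, key interface{}) bool {
	c.mu.Lock()
	defer c.mu.Unlock()
	if _, ok := c.items[key]; !ok {
		return false
	}
	delete(c.items, key)
	return true
}

// Clear drops every cached thunk.
func (c *mapCache) Clear() {
	c.mu.Lock()
	defer c.mu.Unlock()
	c.items = map[interface{}]dataloader.Thunk{}
}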

dataloader.go

Lines changed: 7 additions & 7 deletions
@@ -20,8 +20,8 @@ import (
 // different access permissions and consider creating a new instance per
 // web request.
 type Interface interface {
-	Load(context.Context, string) Thunk
-	LoadMany(context.Context, []string) ThunkMany
+	Load(context.Context, interface{}) Thunk
+	LoadMany(context.Context, []interface{}) ThunkMany
 	Clear(context.Context, string) Interface
 	ClearAll() Interface
 	Prime(ctx context.Context, key string, value interface{}) Interface
@@ -31,7 +31,7 @@ type Interface interface {
 // It's important that the length of the input keys matches the length of the output results.
 //
 // The keys passed to this function are guaranteed to be unique
-type BatchFunc func(context.Context, []string) []*Result
+type BatchFunc func(context.Context, []interface{}) []*Result
 
 // Result is the data structure that a BatchFunc returns.
 // It contains the resolved data, and any errors that may have occurred while fetching the data.
@@ -100,7 +100,7 @@ type ThunkMany func() ([]interface{}, []error)
 
 // type used to on input channel
 type batchRequest struct {
-	key     string
+	key     interface{}
 	channel chan *Result
 }
 
@@ -191,7 +191,7 @@ func NewBatchedLoader(batchFn BatchFunc, opts ...Option) *Loader {
 }
 
 // Load load/resolves the given key, returning a channel that will contain the value and error
-func (l *Loader) Load(originalContext context.Context, key string) Thunk {
+func (l *Loader) Load(originalContext context.Context, key interface{}) Thunk {
 	ctx, finish := l.tracer.TraceLoad(originalContext, key)
 
 	c := make(chan *Result, 1)
@@ -267,7 +267,7 @@ func (l *Loader) Load(originalContext context.Context, key string) Thunk {
 }
 
 // LoadMany loads mulitiple keys, returning a thunk (type: ThunkMany) that will resolve the keys passed in.
-func (l *Loader) LoadMany(originalContext context.Context, keys []string) ThunkMany {
+func (l *Loader) LoadMany(originalContext context.Context, keys []interface{}) ThunkMany {
 	ctx, finish := l.tracer.TraceLoadMany(originalContext, keys)
 
 	length := len(keys)
@@ -386,7 +386,7 @@ func (b *batcher) end() {
 
 // execute the batch of all items in queue
 func (b *batcher) batch(originalContext context.Context) {
-	var keys []string
+	var keys []interface{}
 	var reqs []*batchRequest
 	var items []*Result
 	var panicErr interface{}
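
Because Load, LoadMany, and BatchFunc now accept interface{} keys, a batch function can key on a struct instead of encoding composite IDs into strings. The sketch below illustrates this; the import path and the userKey type are assumptions for illustration, and the Result fields (Data, Error) are taken from this library's Result type.

package main

import (
	"context"
	"fmt"

	dataloader "github.com/nicksrandall/dataloader" // assumed import path
)

// userKey is a hypothetical composite key; any comparable type works now
// that keys are empty interfaces.
type userKey struct {
	TenantID string
	UserID   int
}

func main() {
	// The batch function receives the unique keys collected during one
	// batching window and must return one Result per key, in order.
	batchFn := func(_ context.Context, keys []interface{}) []*dataloader.Result {
		results := make([]*dataloader.Result, len(keys))
		for i, k := range keys {
			key := k.(userKey) // assert back to the concrete key type
			results[i] = &dataloader.Result{
				Data:  fmt.Sprintf("user %d in tenant %s", key.UserID, key.TenantID),
				Error: nil,
			}
		}
		return results
	}

	loader := dataloader.NewBatchedLoader(batchFn)
	thunk := loader.Load(context.Background(), userKey{TenantID: "acme", UserID: 42})

	value, err := thunk() // blocks until the batch has run
	fmt.Println(value, err)
}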

dataloader_test.go

Lines changed: 55 additions & 55 deletions
@@ -81,7 +81,7 @@ func TestLoader(t *testing.T) {
 		t.Parallel()
 		errorLoader, _ := ErrorLoader(0)
 		ctx := context.Background()
-		future := errorLoader.LoadMany(ctx, []string{"1", "2", "3"})
+		future := errorLoader.LoadMany(ctx, []interface{}{"1", "2", "3"})
 		_, err := future()
 		if len(err) != 3 {
 			t.Error("LoadMany didn't return right number of errors")
@@ -90,13 +90,13 @@ func TestLoader(t *testing.T) {
 
 	t.Run("test LoadMany returns len(errors) == len(keys)", func(t *testing.T) {
 		t.Parallel()
-		loader, _ := OneErrorLoader(0)
+		loader, _ := OneErrorLoader(3)
 		ctx := context.Background()
-		future := loader.LoadMany(ctx, []string{"1", "2", "3"})
+		future := loader.LoadMany(ctx, []interface{}{"1", "2", "3"})
 		_, err := future()
+		log.Printf("errs: %#v", err)
 		if len(err) != 3 {
-			t.Error("LoadMany didn't return right number of errors (should match size of input)")
-			return
+			t.Errorf("LoadMany didn't return right number of errors (should match size of input)")
 		}
 
 		if err[0] == nil {
@@ -112,7 +112,7 @@ func TestLoader(t *testing.T) {
 		t.Parallel()
 		identityLoader, _ := IDLoader(0)
 		ctx := context.Background()
-		future := identityLoader.LoadMany(ctx, []string{"1", "2", "3"})
+		future := identityLoader.LoadMany(ctx, []interface{}{"1", "2", "3"})
 		go future()
 		go future()
 	})
@@ -127,7 +127,7 @@ func TestLoader(t *testing.T) {
 		}()
 		panicLoader, _ := PanicLoader(0)
 		ctx := context.Background()
-		future := panicLoader.LoadMany(ctx, []string{"1"})
+		future := panicLoader.LoadMany(ctx, []interface{}{"1"})
 		_, errs := future()
 		if len(errs) < 1 || errs[0].Error() != "Panic received in batch function: Programming error" {
 			t.Error("Panic was not propagated as an error.")
@@ -138,7 +138,7 @@ func TestLoader(t *testing.T) {
 		t.Parallel()
 		identityLoader, _ := IDLoader(0)
 		ctx := context.Background()
-		future := identityLoader.LoadMany(ctx, []string{"1", "2", "3"})
+		future := identityLoader.LoadMany(ctx, []interface{}{"1", "2", "3"})
 		results, _ := future()
 		if results[0].(string) != "1" || results[1].(string) != "2" || results[2].(string) != "3" {
 			t.Error("loadmany didn't return the right value")
@@ -162,8 +162,8 @@ func TestLoader(t *testing.T) {
 		}
 
 		calls := *loadCalls
-		inner := []string{"1", "2"}
-		expected := [][]string{inner}
+		inner := []interface{}{"1", "2"}
+		expected := [][]interface{}{inner}
 		if !reflect.DeepEqual(calls, expected) {
 			t.Errorf("did not call batchFn in right order. Expected %#v, got %#v", expected, calls)
 		}
@@ -176,7 +176,7 @@ func TestLoader(t *testing.T) {
 
 		n := 10
 		reqs := []Thunk{}
-		keys := []string{}
+		keys := []interface{}{}
 		for i := 0; i < n; i++ {
 			key := strconv.Itoa(i)
 			reqs = append(reqs, faultyLoader.Load(ctx, key))
@@ -215,9 +215,9 @@ func TestLoader(t *testing.T) {
 		}
 
 		calls := *loadCalls
-		inner1 := []string{"1", "2"}
-		inner2 := []string{"3"}
-		expected := [][]string{inner1, inner2}
+		inner1 := []interface{}{"1", "2"}
+		inner2 := []interface{}{"3"}
+		expected := [][]interface{}{inner1, inner2}
 		if !reflect.DeepEqual(calls, expected) {
 			t.Errorf("did not respect max batch size. Expected %#v, got %#v", expected, calls)
 		}
@@ -240,8 +240,8 @@ func TestLoader(t *testing.T) {
 		}
 
 		calls := *loadCalls
-		inner := []string{"1"}
-		expected := [][]string{inner}
+		inner := []interface{}{"1"}
+		expected := [][]interface{}{inner}
 		if !reflect.DeepEqual(calls, expected) {
 			t.Errorf("did not respect max batch size. Expected %#v, got %#v", expected, calls)
 		}
@@ -265,8 +265,8 @@ func TestLoader(t *testing.T) {
 		}
 
 		calls := *loadCalls
-		inner := []string{"1"}
-		expected := [][]string{inner}
+		inner := []interface{}{"1"}
+		expected := [][]interface{}{inner}
 		if !reflect.DeepEqual(calls, expected) {
 			t.Errorf("did not respect max batch size. Expected %#v, got %#v", expected, calls)
 		}
@@ -300,8 +300,8 @@ func TestLoader(t *testing.T) {
 		}
 
 		calls := *loadCalls
-		inner := []string{"1", "A"}
-		expected := [][]string{inner}
+		inner := []interface{}{"1", "A"}
+		expected := [][]interface{}{inner}
 		if !reflect.DeepEqual(calls, expected) {
 			t.Errorf("did not respect max batch size. Expected %#v, got %#v", expected, calls)
 		}
@@ -328,8 +328,8 @@ func TestLoader(t *testing.T) {
 		}
 
 		calls := *loadCalls
-		inner := []string{"1"}
-		expected := [][]string{inner}
+		inner := []interface{}{"1"}
+		expected := [][]interface{}{inner}
 		if !reflect.DeepEqual(calls, expected) {
 			t.Errorf("did not batch queries. Expected %#v, got %#v", expected, calls)
 		}
@@ -366,8 +366,8 @@ func TestLoader(t *testing.T) {
 		}
 
 		calls := *loadCalls
-		inner := []string{"1", "A", "B"}
-		expected := [][]string{inner}
+		inner := []interface{}{"1", "A", "B"}
+		expected := [][]interface{}{inner}
 		if !reflect.DeepEqual(calls, expected) {
 			t.Errorf("did not respect max batch size. Expected %#v, got %#v", expected, calls)
 		}
@@ -400,8 +400,8 @@ func TestLoader(t *testing.T) {
 		}
 
 		calls := *loadCalls
-		inner := []string{"1", "A", "B"}
-		expected := [][]string{inner}
+		inner := []interface{}{"1", "A", "B"}
+		expected := [][]interface{}{inner}
 		if !reflect.DeepEqual(calls, expected) {
 			t.Errorf("did not respect max batch size. Expected %#v, got %#v", expected, calls)
 		}
@@ -432,8 +432,8 @@ func TestLoader(t *testing.T) {
 		}
 
 		calls := *loadCalls
-		inner := []string{"1", "A", "B"}
-		expected := [][]string{inner}
+		inner := []interface{}{"1", "A", "B"}
+		expected := [][]interface{}{inner}
 		if !reflect.DeepEqual(calls, expected) {
 			t.Errorf("did not respect max batch size. Expected %#v, got %#v", expected, calls)
 		}
@@ -442,10 +442,10 @@
 }
 
 // test helpers
-func IDLoader(max int) (*Loader, *[][]string) {
+func IDLoader(max int) (*Loader, *[][]interface{}) {
 	var mu sync.Mutex
-	var loadCalls [][]string
-	identityLoader := NewBatchedLoader(func(_ context.Context, keys []string) []*Result {
+	var loadCalls [][]interface{}
+	identityLoader := NewBatchedLoader(func(_ context.Context, keys []interface{}) []*Result {
 		var results []*Result
 		mu.Lock()
 		loadCalls = append(loadCalls, keys)
@@ -457,10 +457,10 @@ func IDLoader(max int) (*Loader, *[][]string) {
 	}, WithBatchCapacity(max))
 	return identityLoader, &loadCalls
 }
-func BatchOnlyLoader(max int) (*Loader, *[][]string) {
+func BatchOnlyLoader(max int) (*Loader, *[][]interface{}) {
 	var mu sync.Mutex
-	var loadCalls [][]string
-	identityLoader := NewBatchedLoader(func(_ context.Context, keys []string) []*Result {
+	var loadCalls [][]interface{}
+	identityLoader := NewBatchedLoader(func(_ context.Context, keys []interface{}) []*Result {
 		var results []*Result
 		mu.Lock()
 		loadCalls = append(loadCalls, keys)
@@ -472,10 +472,10 @@ func BatchOnlyLoader(max int) (*Loader, *[][]string) {
 	}, WithBatchCapacity(max), WithClearCacheOnBatch())
 	return identityLoader, &loadCalls
 }
-func ErrorLoader(max int) (*Loader, *[][]string) {
+func ErrorLoader(max int) (*Loader, *[][]interface{}) {
 	var mu sync.Mutex
-	var loadCalls [][]string
-	identityLoader := NewBatchedLoader(func(_ context.Context, keys []string) []*Result {
+	var loadCalls [][]interface{}
+	identityLoader := NewBatchedLoader(func(_ context.Context, keys []interface{}) []*Result {
 		var results []*Result
 		mu.Lock()
 		loadCalls = append(loadCalls, keys)
@@ -487,11 +487,11 @@ func ErrorLoader(max int) (*Loader, *[][]string) {
 	}, WithBatchCapacity(max))
 	return identityLoader, &loadCalls
 }
-func OneErrorLoader(max int) (*Loader, *[][]string) {
+func OneErrorLoader(max int) (*Loader, *[][]interface{}) {
 	var mu sync.Mutex
-	var loadCalls [][]string
-	identityLoader := NewBatchedLoader(func(_ context.Context, keys []string) []*Result {
-		var results []*Result
+	var loadCalls [][]interface{}
+	identityLoader := NewBatchedLoader(func(_ context.Context, keys []interface{}) []*Result {
+		results := make([]*Result, max, max)
 		mu.Lock()
 		loadCalls = append(loadCalls, keys)
 		mu.Unlock()
@@ -500,23 +500,23 @@ func OneErrorLoader(max int) (*Loader, *[][]string) {
 			if i == 0 {
 				err = errors.New("always error on the first key")
 			}
-			results = append(results, &Result{key, err})
+			results[i] = &Result{key, err}
 		}
 		return results
 	}, WithBatchCapacity(max))
 	return identityLoader, &loadCalls
 }
-func PanicLoader(max int) (*Loader, *[][]string) {
-	var loadCalls [][]string
-	panicLoader := NewBatchedLoader(func(_ context.Context, keys []string) []*Result {
+func PanicLoader(max int) (*Loader, *[][]interface{}) {
+	var loadCalls [][]interface{}
+	panicLoader := NewBatchedLoader(func(_ context.Context, keys []interface{}) []*Result {
 		panic("Programming error")
 	}, WithBatchCapacity(max), withSilentLogger())
 	return panicLoader, &loadCalls
 }
-func BadLoader(max int) (*Loader, *[][]string) {
+func BadLoader(max int) (*Loader, *[][]interface{}) {
 	var mu sync.Mutex
-	var loadCalls [][]string
-	identityLoader := NewBatchedLoader(func(_ context.Context, keys []string) []*Result {
+	var loadCalls [][]interface{}
+	identityLoader := NewBatchedLoader(func(_ context.Context, keys []interface{}) []*Result {
 		var results []*Result
 		mu.Lock()
 		loadCalls = append(loadCalls, keys)
@@ -526,11 +526,11 @@ func BadLoader(max int) (*Loader, *[][]string) {
 	}, WithBatchCapacity(max))
 	return identityLoader, &loadCalls
 }
-func NoCacheLoader(max int) (*Loader, *[][]string) {
+func NoCacheLoader(max int) (*Loader, *[][]interface{}) {
 	var mu sync.Mutex
-	var loadCalls [][]string
+	var loadCalls [][]interface{}
 	cache := &NoCache{}
-	identityLoader := NewBatchedLoader(func(_ context.Context, keys []string) []*Result {
+	identityLoader := NewBatchedLoader(func(_ context.Context, keys []interface{}) []*Result {
 		var results []*Result
 		mu.Lock()
 		loadCalls = append(loadCalls, keys)
@@ -544,11 +544,11 @@ func NoCacheLoader(max int) (*Loader, *[][]string) {
 }
 
 // FaultyLoader gives len(keys)-1 results.
-func FaultyLoader() (*Loader, *[][]string) {
+func FaultyLoader() (*Loader, *[][]interface{}) {
 	var mu sync.Mutex
-	var loadCalls [][]string
+	var loadCalls [][]interface{}
 
-	loader := NewBatchedLoader(func(_ context.Context, keys []string) []*Result {
+	loader := NewBatchedLoader(func(_ context.Context, keys []interface{}) []*Result {
 		var results []*Result
 		mu.Lock()
 		loadCalls = append(loadCalls, keys)
@@ -573,7 +573,7 @@ func FaultyLoader() (*Loader, *[][]string) {
 ///////////////////////////////////////////////////
 var a = &Avg{}
 
-func batchIdentity(_ context.Context, keys []string) (results []*Result) {
+func batchIdentity(_ context.Context, keys []interface{}) (results []*Result) {
 	a.Add(len(keys))
 	for _, key := range keys {
 		results = append(results, &Result{key, nil})
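
The OneErrorLoader changes above (constructing it with 3 and pre-sizing the results slice) follow the contract documented on BatchFunc: return exactly one Result per key, in key order, so that LoadMany can report one error slot per input key. Below is a minimal caller-side sketch of that behaviour, with the same assumed import path as in the earlier examples.

package main

import (
	"context"
	"errors"
	"fmt"

	dataloader "github.com/nicksrandall/dataloader" // assumed import path
)

func main() {
	// Fail the first key only; every key still gets its own Result slot.
	batchFn := func(_ context.Context, keys []interface{}) []*dataloader.Result {
		results := make([]*dataloader.Result, len(keys))
		for i, key := range keys {
			var err error
			if i == 0 {
				err = errors.New("always error on the first key")
			}
			results[i] = &dataloader.Result{Data: key, Error: err}
		}
		return results
	}

	loader := dataloader.NewBatchedLoader(batchFn)
	thunkMany := loader.LoadMany(context.Background(), []interface{}{"1", "2", "3"})

	_, errs := thunkMany()
	fmt.Println(len(errs)) // matches len(keys): 3
	fmt.Println(errs[0])   // only the first slot carries a non-nil error
}
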
0 commit comments
