Merged
4 changes: 2 additions & 2 deletions lib/wasmextractor/wasmextractor.go
@@ -127,7 +127,7 @@ func readImports(data []byte) []WasmItem {
 
 	imports := make([]WasmItem, numItems)
 
-	for i := 0; i < int(numItems); i++ {
+	for i := range int(numItems) {
 		moduleLen, n := binary.Uvarint(data[offset:])
 		offset += n
 
@@ -163,7 +163,7 @@ func readExports(data []byte) []WasmItem {
 
 	exports := make([]WasmItem, numItems)
 
-	for i := 0; i < int(numItems); i++ {
+	for i := range int(numItems) {
 		fieldLen, n := binary.Uvarint(data[offset:])
 		offset += n
 
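The new loop header in this hunk uses Go's range-over-integer form, added in Go 1.22, which counts i from 0 up to n-1 just like the three-clause loop it replaces. A minimal standalone sketch, with a made-up item count rather than real wasm section data:

package main

import "fmt"

func main() {
	numItems := uint64(3) // stand-in for a parsed section count

	// range over an int counts 0, 1, ..., n-1, matching the
	// classic "for i := 0; i < n; i++" form.
	for i := range int(numItems) {
		fmt.Println("reading item", i)
	}
}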
5 changes: 3 additions & 2 deletions runtime/collections/in_mem/hnsw/vector_index_test.go
@@ -11,6 +11,7 @@ package hnsw
 
 import (
 	"fmt"
+	"slices"
 	"sync"
 	"testing"
 
@@ -36,7 +37,7 @@ func TestMultipleSequentialVectorIndexes(t *testing.T) {
 	numIndexes := 20
 
 	// Create and initialize the indexes
-	for i := 0; i < numIndexes; i++ {
+	for i := range numIndexes {
 		wg.Add(1)
 
 		go func(i int) {
@@ -52,7 +53,7 @@ func TestMultipleSequentialVectorIndexes(t *testing.T) {
 			for j := range baseTextIds {
 				textIds[j] = baseTextIds[j] + int64(i*len(baseTextIds))
 				keys[j] = baseKeys[j] + fmt.Sprint(i)
-				vecs[j] = append([]float32{}, baseVecs[j]...)
+				vecs[j] = slices.Clone(baseVecs[j])
 				for k := range vecs[j] {
 					vecs[j][k] += float32(i) / 10
 				}
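slices.Clone (standard library since Go 1.21) allocates a fresh backing array, so the per-goroutine vectors above can be mutated without touching the shared base vectors. A small sketch of that copy-then-mutate pattern, with illustrative values rather than the test fixtures:

package main

import (
	"fmt"
	"slices"
)

func main() {
	base := []float32{0.1, 0.2, 0.3}

	// Clone copies into new memory; edits to vec leave base intact.
	vec := slices.Clone(base)
	for k := range vec {
		vec[k] += 0.5
	}

	fmt.Println(base) // still the original values
	fmt.Println(vec)  // shifted copy
}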
5 changes: 3 additions & 2 deletions runtime/collections/in_mem/sequential/vector_index_test.go
@@ -11,6 +11,7 @@ package sequential
 
 import (
 	"fmt"
+	"slices"
 	"sync"
 	"testing"
 
@@ -36,7 +37,7 @@ func TestMultipleSequentialVectorIndexes(t *testing.T) {
 	numIndexes := 20
 
 	// Create and initialize the indexes
-	for i := 0; i < numIndexes; i++ {
+	for i := range numIndexes {
 		wg.Add(1)
 
 		go func(i int) {
@@ -52,7 +53,7 @@ func TestMultipleSequentialVectorIndexes(t *testing.T) {
 			for j := range baseTextIds {
 				textIds[j] = baseTextIds[j] + int64(i*len(baseTextIds))
 				keys[j] = baseKeys[j] + fmt.Sprint(i)
-				vecs[j] = append([]float32{}, baseVecs[j]...)
+				vecs[j] = slices.Clone(baseVecs[j])
 				for k := range vecs[j] {
 					vecs[j][k] += float32(i) / 10
 				}
2 changes: 1 addition & 1 deletion runtime/collections/in_mem/text_index_test.go
@@ -25,7 +25,7 @@ func TestMultipleInMemCollections(t *testing.T) {
 	numCollections := 10
 
 	// Create and initialize the collections
-	for i := 0; i < numCollections; i++ {
+	for i := range numCollections {
 		wg.Add(1)
 
 		go func(i int) {
2 changes: 1 addition & 1 deletion runtime/collections/index/helper.go
@@ -47,7 +47,7 @@ func BytesAsFloatArray(encoded []byte, retVal *[]float32) {
 	if resultLen == 0 {
 		return
 	}
-	for i := 0; i < resultLen; i++ {
+	for range resultLen {
 		// Assume LittleEndian for encoding since this is
 		// the assumption elsewhere when reading from client.
 		// See dgraph-io/dgo/protos/api.pb.go
4 changes: 2 additions & 2 deletions runtime/collections/utils/heap.go
@@ -47,11 +47,11 @@ func (h MaxTupleHeap) Swap(i, j int) {
 	h[i], h[j] = h[j], h[i]
 }
 
-func (h *MaxTupleHeap) Push(x interface{}) {
+func (h *MaxTupleHeap) Push(x any) {
 	*h = append(*h, x.(MaxHeapElement))
 }
 
-func (h *MaxTupleHeap) Pop() interface{} {
+func (h *MaxTupleHeap) Pop() any {
 	old := *h
 	n := len(old)
 	x := old[n-1]
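Since Go 1.18, any is a predeclared alias for interface{}, so changing the Push and Pop signatures is purely cosmetic and the type still satisfies container/heap's heap.Interface. A self-contained sketch with a hypothetical integer max-heap, not the repository's MaxTupleHeap:

package main

import (
	"container/heap"
	"fmt"
)

// intHeap is a toy max-heap; Push and Pop use the "any" spellings
// that heap.Interface expects.
type intHeap []int

func (h intHeap) Len() int           { return len(h) }
func (h intHeap) Less(i, j int) bool { return h[i] > h[j] } // max-heap ordering
func (h intHeap) Swap(i, j int)      { h[i], h[j] = h[j], h[i] }

func (h *intHeap) Push(x any) { *h = append(*h, x.(int)) }

func (h *intHeap) Pop() any {
	old := *h
	n := len(old)
	x := old[n-1]
	*h = old[:n-1]
	return x
}

func main() {
	h := &intHeap{2, 7, 3}
	heap.Init(h)
	heap.Push(h, 9)
	fmt.Println(heap.Pop(h)) // 9, the largest element
}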
2 changes: 1 addition & 1 deletion runtime/collections/utils/heap_test.go
@@ -43,7 +43,7 @@ func TestHeap(t *testing.T) {
 	expectedIndices := []string{"three", "two", "one"}
 	initialLen := h.Len() // Store initial length of heap
 
-	for i := 0; i < initialLen; i++ {
+	for i := range initialLen {
 		popped := heap.Pop(h).(MaxHeapElement)
 		if popped.value != expectedValues[i] || popped.index != expectedIndices[i] {
 			t.Errorf("Expected pop value of %v and index '%s', got %v and '%s'", expectedValues[i], expectedIndices[i], popped.value, popped.index)
10 changes: 2 additions & 8 deletions runtime/collections/vector.go
@@ -35,10 +35,7 @@ func batchInsertVectorsToMemory(ctx context.Context, vectorIndex interfaces.Vect
 		return errors.New("mismatch in vectors, keys, and textIds")
 	}
 	for i := 0; i < len(textIds); i += batchSize {
-		end := i + batchSize
-		if end > len(textIds) {
-			end = len(textIds)
-		}
+		end := min(i+batchSize, len(textIds))
 		textIdsBatch := textIds[i:end]
 		vectorIdsBatch := vectorIds[i:end]
 		keysBatch := keys[i:end]
@@ -212,10 +209,7 @@ func processTexts(ctx context.Context, col interfaces.CollectionNamespace, vecto
 		return fmt.Errorf("mismatch in keys and texts")
 	}
 	for i := 0; i < len(keys); i += batchSize {
-		end := i + batchSize
-		if end > len(keys) {
-			end = len(keys)
-		}
+		end := min(i+batchSize, len(keys))
 		keysBatch := keys[i:end]
 		textsBatch := texts[i:end]
 
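Both hunks collapse the end-of-batch clamp into the min builtin, available since Go 1.21. A rough sketch of the batching pattern with placeholder keys (not the runtime's collection data):

package main

import "fmt"

func main() {
	keys := []string{"a", "b", "c", "d", "e"}
	batchSize := 2

	for i := 0; i < len(keys); i += batchSize {
		// min keeps the last, possibly short batch in bounds,
		// replacing the old end/if/clamp boilerplate.
		end := min(i+batchSize, len(keys))
		fmt.Println(keys[i:end])
	}
}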
2 changes: 1 addition & 1 deletion runtime/db/inferencehistory.go
@@ -54,7 +54,7 @@ type Inference struct {
 	StartedAt string `json:"started_at,omitempty"`
 	DurationMs int64 `json:"duration_ms,omitempty"`
 	Function string `json:"function,omitempty"`
-	Plugin Plugin `json:"plugin,omitempty"`
+	Plugin Plugin `json:"plugin"`
 }
 
 const batchSize = 100
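Dropping omitempty from the Plugin tag matches how encoding/json actually behaves: omitempty only omits false, zero numbers, empty strings, and nil or empty slices, maps, pointers, and interfaces, never a non-pointer struct value, so the option was a no-op on this field. A small illustration with stand-in types, not the runtime's real Plugin definition:

package main

import (
	"encoding/json"
	"fmt"
)

type plugin struct {
	Name string `json:"name,omitempty"`
}

type inference struct {
	Function string `json:"function,omitempty"`
	// omitempty never fires for a plain struct field,
	// so the zero plugin is emitted either way.
	Plugin plugin `json:"plugin,omitempty"`
}

func main() {
	out, _ := json.Marshal(inference{Function: "classify"})
	fmt.Println(string(out)) // {"function":"classify","plugin":{}}
}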
4 changes: 2 additions & 2 deletions runtime/graphql/datasource/source.go
@@ -120,7 +120,7 @@ func writeGraphQLResponse(ctx context.Context, out *bytes.Buffer, result any, gq
 			gqlErrors = append(gqlErrors, resolve.GraphQLError{
 				Message: fnErr.Error(),
 				Path: []any{fieldName},
-				Extensions: map[string]interface{}{
+				Extensions: map[string]any{
 					"level": "error",
 				},
 			})
@@ -402,7 +402,7 @@ func transformErrors(messages []utils.LogMessage, ci *callInfo) []resolve.GraphQ
 		errors = append(errors, resolve.GraphQLError{
 			Message: msg.Message,
 			Path: []any{ci.FieldInfo.AliasOrName()},
-			Extensions: map[string]interface{}{
+			Extensions: map[string]any{
 				"level": msg.Level,
 			},
 		})
10 changes: 5 additions & 5 deletions runtime/graphql/engine/logging.go
@@ -60,8 +60,8 @@ func (l *loggerAdapter) LevelLogger(level abstractlogger.Level) abstractlogger.L
 	}
 }
 
-func (l *loggerAdapter) fields(fields []abstractlogger.Field) map[string]interface{} {
-	out := make(map[string]interface{}, len(fields))
+func (l *loggerAdapter) fields(fields []abstractlogger.Field) map[string]any {
+	out := make(map[string]any, len(fields))
 	for _, f := range fields {
 
 		lf := *convertLoggerField(&f)
@@ -92,7 +92,7 @@ type LevelLoggerAdapter struct {
 	level abstractlogger.Level
 }
 
-func (s *LevelLoggerAdapter) Println(v ...interface{}) {
+func (s *LevelLoggerAdapter) Println(v ...any) {
 	switch s.level {
 	case abstractlogger.DebugLevel:
 		s.l.Debug().Msgf("%v", v[0])
@@ -109,7 +109,7 @@ func (s *LevelLoggerAdapter) Println(v ...interface{}) {
 	}
 }
 
-func (s *LevelLoggerAdapter) Printf(format string, v ...interface{}) {
+func (s *LevelLoggerAdapter) Printf(format string, v ...any) {
 	switch s.level {
 	case abstractlogger.DebugLevel:
 		s.l.Debug().Msgf(format, v...)
@@ -137,7 +137,7 @@ type loggerField struct {
 	stringsValue []string
 	intValue int64
 	byteValue []byte
-	interfaceValue interface{}
+	interfaceValue any
 	errorValue error
 }
 
3 changes: 2 additions & 1 deletion runtime/graphql/graphql.go
@@ -13,6 +13,7 @@ import (
 	"context"
 	"fmt"
 	"net/http"
+	"slices"
 	"strconv"
 	"strings"
 
@@ -172,7 +173,7 @@ func handleGraphQLRequest(w http.ResponseWriter, r *http.Request) {
 			if f := q.Get("fields"); f.Exists() && f.Type == gjson.Null {
 				response[f.Index] = '['
 				response[f.Index+1] = ']'
-				response = append(response[:f.Index+2], response[f.Index+4:]...)
+				response = slices.Delete(response, f.Index+2, f.Index+4)
 			}
 		}
 	}
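slices.Delete(s, i, j), added in Go 1.21, removes the elements s[i:j] and returns the shortened slice, which is what the old append(response[:i], response[j:]...) splice did. A tiny sketch on a throwaway byte slice; the indices are illustrative, not the gjson offsets used above:

package main

import (
	"fmt"
	"slices"
)

func main() {
	b := []byte("abcdef")

	// Remove b[2:4] ("cd"); the tail shifts left and the
	// returned slice is two bytes shorter.
	b = slices.Delete(b, 2, 4)
	fmt.Println(string(b)) // abef
}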
4 changes: 2 additions & 2 deletions runtime/hnsw/distance_test.go
@@ -48,8 +48,8 @@ func TestCosineSimilarity(t *testing.T) {
 func BenchmarkCosineSimilarity(b *testing.B) {
 	v1 := randFloats(1536)
 	v2 := randFloats(1536)
-	b.ResetTimer()
-	for i := 0; i < b.N; i++ {
+
+	for b.Loop() {
 		_, err := CosineDistance(v1, v2)
 		if err != nil {
 			b.Fatal(err)
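The benchmark now drives its iterations with testing.B.Loop, added in Go 1.24; b.Loop manages the iteration count itself and keeps the setup before the loop out of the measured time, which is why the explicit b.ResetTimer call can go. A minimal sketch with a stand-in function (sum is not part of this package):

package hnsw_test

import "testing"

// sum is a throwaway function to benchmark; any pure function works here.
func sum(xs []float32) float32 {
	var s float32
	for _, x := range xs {
		s += x
	}
	return s
}

func BenchmarkSum(b *testing.B) {
	xs := make([]float32, 1536) // setup; b.Loop keeps it out of the timing

	for b.Loop() {
		_ = sum(xs)
	}
}

Where the body still needs an iteration counter, as in the graph benchmarks further down, the same call works as the condition of a three-clause for (for i := 0; b.Loop(); i++).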
8 changes: 4 additions & 4 deletions runtime/hnsw/encode.go
@@ -25,7 +25,7 @@ import (
 
 var byteOrder = binary.LittleEndian
 
-func binaryRead(r io.Reader, data interface{}) (int, error) {
+func binaryRead(r io.Reader, data any) (int, error) {
 	switch v := data.(type) {
 	case *int:
 		br, ok := r.(io.ByteReader)
@@ -219,15 +219,15 @@ func (h *Graph[K]) Import(r io.Reader) error {
 	}
 
 	h.layers = make([]*layer[K], nLayers)
-	for i := 0; i < nLayers; i++ {
+	for i := range nLayers {
 		var nNodes int
 		_, err = binaryRead(r, &nNodes)
 		if err != nil {
 			return err
 		}
 
 		nodes := make(map[K]*layerNode[K], nNodes)
-		for j := 0; j < nNodes; j++ {
+		for j := range nNodes {
 			var key K
 			var vec Vector
 			var nNeighbors int
@@ -237,7 +237,7 @@ func (h *Graph[K]) Import(r io.Reader) error {
 			}
 
 			neighbors := make([]K, nNeighbors)
-			for k := 0; k < nNeighbors; k++ {
+			for k := range nNeighbors {
 				var neighbor K
 				_, err = binaryRead(r, &neighbor)
 				if err != nil {
15 changes: 7 additions & 8 deletions runtime/hnsw/encode_test.go
@@ -133,7 +133,7 @@ func requireGraphApproxEquals[K cmp.Ordered](t *testing.T, g1, g2 *Graph[K]) {
 
 func TestGraph_ExportImport(t *testing.T) {
 	g1 := newTestGraph[int]()
-	for i := 0; i < 128; i++ {
+	for i := range 128 {
 		err := g1.Add(
 			Node[int]{
 				i, randFloats(1),
@@ -176,7 +176,7 @@ func TestSavedGraph(t *testing.T) {
 	g1, err := LoadSavedGraph[int](dir + "/graph")
 	require.NoError(t, err)
 	require.Equal(t, 0, g1.Len())
-	for i := 0; i < 128; i++ {
+	for i := range 128 {
 		err := g1.Add(
 			Node[int]{
 				i, randFloats(1),
@@ -199,7 +199,7 @@ const benchGraphSize = 100
 func BenchmarkGraph_Import(b *testing.B) {
 	b.ReportAllocs()
 	g := newTestGraph[int]()
-	for i := 0; i < benchGraphSize; i++ {
+	for i := range benchGraphSize {
 		err := g.Add(
 			Node[int]{
 				i, randFloats(256),
@@ -212,10 +212,9 @@ func BenchmarkGraph_Import(b *testing.B) {
 	err := g.Export(buf)
 	require.NoError(b, err)
 
-	b.ResetTimer()
 	b.SetBytes(int64(buf.Len()))
 
-	for i := 0; i < b.N; i++ {
+	for b.Loop() {
 		b.StopTimer()
 		rdr := bytes.NewReader(buf.Bytes())
 		g := newTestGraph[int]()
@@ -228,7 +227,7 @@ func BenchmarkGraph_Import(b *testing.B) {
 func BenchmarkGraph_Export(b *testing.B) {
 	b.ReportAllocs()
 	g := newTestGraph[int]()
-	for i := 0; i < benchGraphSize; i++ {
+	for i := range benchGraphSize {
 		err := g.Add(
 			Node[int]{
 				i, randFloats(256),
@@ -238,8 +237,8 @@ func BenchmarkGraph_Export(b *testing.B) {
 	}
 
 	var buf bytes.Buffer
-	b.ResetTimer()
-	for i := 0; i < b.N; i++ {
+
+	for i := 0; b.Loop(); i++ {
 		err := g.Export(&buf)
 		require.NoError(b, err)
 		if i == 0 {
2 changes: 1 addition & 1 deletion runtime/hnsw/graph.go
@@ -355,7 +355,7 @@ func (h *Graph[K]) randomLevel() (int, error) {
 		}
 	}
 
-	for level := 0; level < max; level++ {
+	for level := range max {
 		if h.Rng == nil {
 			h.Rng = defaultRand()
 		}
12 changes: 6 additions & 6 deletions runtime/hnsw/graph_test.go
@@ -106,7 +106,7 @@ func newTestGraph[K cmp.Ordered]() *Graph[K] {
 func TestGraph_AddSearch(t *testing.T) {
 	g := newTestGraph[int]()
 
-	for i := 0; i < 128; i++ {
+	for i := range 128 {
 		err := g.Add(
 			Node[int]{
 				Key: i,
@@ -151,7 +151,7 @@ func TestGraph_AddSearch(t *testing.T) {
 
 func TestGraph_AddDelete(t *testing.T) {
 	g := newTestGraph[int]()
-	for i := 0; i < 128; i++ {
+	for i := range 128 {
 		err := g.Add(Node[int]{
 			Key: i,
 			Value: Vector{float32(i)},
@@ -197,7 +197,7 @@ func Benchmark_HNSW(b *testing.B) {
 	g := Graph[int]{}
 	g.Ml = 0.5
 	g.Distance = EuclideanDistance
-	for i := 0; i < size; i++ {
+	for i := range size {
 		err := g.Add(Node[int]{
 			Key: i,
 			Value: Vector{float32(i)},
@@ -207,7 +207,7 @@ func Benchmark_HNSW(b *testing.B) {
 	b.ResetTimer()
 
 	b.Run("Search", func(b *testing.B) {
-		for i := 0; i < b.N; i++ {
+		for i := 0; b.Loop(); i++ {
 			_, err := g.Search(
 				[]float32{float32(i % size)},
 				4,
@@ -235,7 +235,7 @@ func Benchmark_HNSW_1536(b *testing.B) {
 	g := newTestGraph[int]()
 	const size = 1000
 	points := make([]Node[int], size)
-	for i := 0; i < size; i++ {
+	for i := range size {
 		points[i] = Node[int]{
 			Key: i,
 			Value: Vector(randFloats(1536)),
@@ -246,7 +246,7 @@ func Benchmark_HNSW_1536(b *testing.B) {
 	b.ResetTimer()
 
 	b.Run("Search", func(b *testing.B) {
-		for i := 0; i < b.N; i++ {
+		for i := 0; b.Loop(); i++ {
 			_, err := g.Search(
 				points[i%size].Value,
 				4,
4 changes: 2 additions & 2 deletions runtime/hnsw/heap/heap.go
@@ -36,11 +36,11 @@ func (h *innerHeap[T]) Swap(i, j int) {
 	h.data[i], h.data[j] = h.data[j], h.data[i]
 }
 
-func (h *innerHeap[T]) Push(x interface{}) {
+func (h *innerHeap[T]) Push(x any) {
 	h.data = append(h.data, x.(T))
 }
 
-func (h *innerHeap[T]) Pop() interface{} {
+func (h *innerHeap[T]) Pop() any {
 	n := len(h.data)
 	x := h.data[n-1]
 	h.data = h.data[:n-1]