Commit fe83ff8

removed some stuff
1 parent e5657b7 commit fe83ff8

11 files changed: 34 additions & 42 deletions


.trunk/trunk.yaml

Lines changed: 2 additions & 0 deletions
@@ -47,4 +47,6 @@ lint:
 actions:
   enabled:
     - trunk-announce
+    - trunk-check-pre-push
+    - trunk-fmt-pre-commit
     - trunk-upgrade-available

.vscode/settings.json

Lines changed: 1 addition & 3 deletions
@@ -2,7 +2,5 @@
   "editor.formatOnSave": true,
   "editor.defaultFormatter": "trunk.io",
   "editor.trimAutoWhitespace": true,
-  "trunk.autoInit": false,
-  "go.alternateTools": { "go": "/home/harshil/go/bin/go" },
-  "go.toolsEnvVars": { "GOROOT": "/home/harshil/go", "PATH": "/home/harshil/go/bin:${env:PATH}" }
+  "trunk.autoInit": false
 }

dgraph/cmd/alpha/run.go

Lines changed: 1 addition & 1 deletion
@@ -456,7 +456,7 @@ func serveGRPC(l net.Listener, tlsCfg *tls.Config, closer *z.Closer) {
     opt := []grpc.ServerOption{
         grpc.MaxRecvMsgSize(x.GrpcMaxSize),
         grpc.MaxSendMsgSize(x.GrpcMaxSize),
-        grpc.MaxConcurrentStreams(1<<20),
+        grpc.MaxConcurrentStreams(1000),
         grpc.StatsHandler(&ocgrpc.ServerHandler{}),
         grpc.UnaryInterceptor(audit.AuditRequestGRPC),
     }
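This hunk lowers the per-connection stream cap from 1<<20 (1,048,576) to 1,000. For context, a minimal, hypothetical sketch of how these grpc-go server options fit together in a standalone server; the listener address and the 4 MB message cap are placeholders, not values taken from this commit:

```go
package main

import (
	"log"
	"net"

	"google.golang.org/grpc"
)

func main() {
	// Placeholder values for illustration; the real code derives the
	// message sizes from x.GrpcMaxSize and the listener from Alpha flags.
	const maxMsgSize = 4 << 20 // 4 MB

	lis, err := net.Listen("tcp", ":9080")
	if err != nil {
		log.Fatalf("listen: %v", err)
	}

	opts := []grpc.ServerOption{
		grpc.MaxRecvMsgSize(maxMsgSize),
		grpc.MaxSendMsgSize(maxMsgSize),
		// Caps the number of concurrent HTTP/2 streams per client
		// connection; additional RPCs queue on the client side.
		grpc.MaxConcurrentStreams(1000),
	}

	srv := grpc.NewServer(opts...)
	if err := srv.Serve(lis); err != nil {
		log.Fatalf("serve: %v", err)
	}
}
```

Note that MaxConcurrentStreams applies per client connection, so a client can still exceed the cap by opening additional connections.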

posting/index.go

Lines changed: 15 additions & 16 deletions
@@ -103,7 +103,7 @@ func (mp *MutationPipeline) InsertTokenizerIndexes(ctx context.Context, pipeline
     defer func() {
         fmt.Println("Inserting tokenizer indexes for predicate", pipeline.attr, "took", time.Since(startTime))
     }()
-
+
     tokenizers := schema.State().Tokenizer(ctx, pipeline.attr)
     if len(tokenizers) == 0 {
         return
@@ -149,7 +149,6 @@ func (mp *MutationPipeline) InsertTokenizerIndexes(ctx context.Context, pipeline
     f := func(numGo int) *types.LockedShardedMap[string, *pb.PostingList] {
         wg := &sync.WaitGroup{}

-
         globalMap := types.NewLockedShardedMap[string, *pb.PostingList]()
         process := func(start int) {
             tokenizers := schema.State().Tokenizer(ctx, pipeline.attr)
@@ -170,7 +169,7 @@ func (mp *MutationPipeline) InsertTokenizerIndexes(ctx context.Context, pipeline
             }

             posting := valPost[stringValue]
-        // Build info per iteration without indexEdge.
+            // Build info per iteration without indexEdge.
             info := &indexMutationInfo{
                 tokenizers:   tokenizers,
                 factorySpecs: factorySpecs,
@@ -182,9 +181,9 @@ func (mp *MutationPipeline) InsertTokenizerIndexes(ctx context.Context, pipeline
             }

             info.edge = &pb.DirectedEdge{
-                Attr: pipeline.attr,
-                Op: pb.DirectedEdge_SET,
-                Lang: string(posting.LangTag),
+                Attr:  pipeline.attr,
+                Op:    pb.DirectedEdge_SET,
+                Lang:  string(posting.LangTag),
                 Value: posting.Value,
             }

@@ -205,7 +204,7 @@ func (mp *MutationPipeline) InsertTokenizerIndexes(ctx context.Context, pipeline
                 val.Postings = append(val.Postings, valPl.Postings...)
                 localMap[string(key)] = val
             }
-        }
+        }

         for key, value := range localMap {
             // pk, _ := x.Parse([]byte(key))
@@ -497,22 +496,22 @@ func (mp *MutationPipeline) ProcessCount(ctx context.Context, pipeline *Predicat

         list.Lock()
         prevCount := list.GetLength(mp.txn.StartTs)
-
+
         for _, post := range postingList.Postings {
             found, _, _ := list.findPosting(post.StartTs, post.Uid)
             if found {
                 if post.Op == Set {
-                post.Op = Ovr
+                    post.Op = Ovr
                 }
             } else {
                 if post.Op == Del {
                     continue
                 }
             }

-        list.updateMutationLayer(post, isSingle, true)
+            list.updateMutationLayer(post, isSingle, true)
         }
-
+
         newCount := list.GetLength(mp.txn.StartTs)
         updated := list.mutationMap.currentEntries != nil
         list.Unlock()
@@ -560,7 +559,7 @@ func (mp *MutationPipeline) ProcessSingle(ctx context.Context, pipeline *Predica
     postings := make(map[uint64]*pb.PostingList, 1000)

     dataKey := x.DataKey(pipeline.attr, 0)
-    insertDeleteAllEdge := !(index || reverse || count)
+    insertDeleteAllEdge := !(index || reverse || count)

     var oldVal *pb.Posting
     for edge := range pipeline.edges {
@@ -662,12 +661,12 @@ func (mp *MutationPipeline) ProcessSingle(ctx context.Context, pipeline *Predica

     for uid, pl := range postings {
         binary.BigEndian.PutUint64(dataKey[len(dataKey)-8:], uid)
-        key := baseKey+string(dataKey[len(dataKey)-8:])
+        key := baseKey + string(dataKey[len(dataKey)-8:])

         if !noConflict {
             mp.txn.addConflictKey(farm.Fingerprint64([]byte(key)))
         }
-
+
         if _, err := mp.txn.AddDelta(key, *pl); err != nil {
             pipeline.errCh <- err
             return
@@ -746,7 +745,7 @@ func (mp *MutationPipeline) ProcessPredicate(ctx context.Context, pipeline *Pred
     runListFn := false

     if ok {
-        if (isList || su.Lang) {
+        if isList || su.Lang {
             runListFn = true
         }
     }
@@ -1795,7 +1794,7 @@ func (r *rebuilder) Run(ctx context.Context) error {
             Key:      []byte(key),
             Value:    dataBytes,
             UserMeta: []byte{BitDeltaPosting},
-            Version: version,
+            Version:  version,
         }
         kvs = append(kvs, &kv)
         return nil
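Almost every hunk in this file is a whitespace or alignment cleanup of the kind a formatter enforces (trailing whitespace on blank lines, struct-literal field alignment, spaces around operators, dropping redundant parentheses around an if condition); removed/added pairs that look identical above differ only in whitespace in the original diff. The one recurring pattern worth spelling out is the data-key construction in ProcessSingle: the last 8 bytes of the data key are overwritten with the UID in big-endian order and appended to a base key. A small, self-contained sketch of that pattern; the baseKey string and the UID value here are made up for illustration:

```go
package main

import (
	"encoding/binary"
	"fmt"
)

func main() {
	// Hypothetical stand-ins: the real code builds dataKey with x.DataKey
	// and takes baseKey from the transaction cache.
	baseKey := "data:name"
	dataKey := make([]byte, len(baseKey)+8)
	copy(dataKey, baseKey)

	uid := uint64(0xCAFE)

	// Overwrite the trailing 8 bytes with the big-endian UID, then append
	// that suffix to the base key, mirroring the hunk around line 663.
	binary.BigEndian.PutUint64(dataKey[len(dataKey)-8:], uid)
	key := baseKey + string(dataKey[len(dataKey)-8:])

	fmt.Printf("%q -> % x\n", key, []byte(key))
}
```

Big-endian encoding keeps keys for the same predicate lexicographically ordered by UID, which is presumably why the suffix is written that way.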

posting/index_test.go

Lines changed: 1 addition & 1 deletion
@@ -148,7 +148,7 @@ func addMutation(t *testing.T, l *List, edge *pb.DirectedEdge, op uint32,
     writer := NewTxnWriter(pstore)
     require.NoError(t, txn.CommitToDisk(writer, commitTs))
     require.NoError(t, writer.Flush())
-    newTxn := Oracle().RegisterStartTs(commitTs+1)
+    newTxn := Oracle().RegisterStartTs(commitTs + 1)
     l, err = newTxn.Get(l.key)
     require.NoError(t, err)
 }

posting/list.go

Lines changed: 1 addition & 1 deletion
@@ -1167,7 +1167,7 @@ func (l *List) pickPostings(readTs uint64) (uint64, []*pb.Posting) {
         return pi.Uid < pj.Uid
     })

-    if len(posts)> 0 {
+    if len(posts) > 0 {
         if hasDeleteAll(posts[0]) {
             posts = posts[1:]
         }

posting/mvcc.go

Lines changed: 3 additions & 3 deletions
@@ -295,7 +295,7 @@ func (txn *Txn) CommitToDisk(writer *TxnWriter, commitTs uint64) error {
     for _, i := range cache.globalMap {
         i.Iterate(func(key string, data *pb.PostingList) error {
             return writer.update(commitTs, func(btxn *badger.Txn) error {
-                if len(data.Postings) == 0{
+                if len(data.Postings) == 0 {
                     return nil
                 }
                 // pk, _ := x.Parse([]byte(key))
@@ -320,7 +320,7 @@
                 }
                 return nil
             })
-    })
+        })
     }

     var idx int
@@ -332,7 +332,7 @@
     for ; idx < len(keys); idx++ {
         key := keys[idx]
         data, ok := cache.deltas.Get(key)
-        if !ok || len(data) == 0{
+        if !ok || len(data) == 0 {
             continue
         }
         // pk, _ := x.Parse([]byte(key))

tok/tok.go

Lines changed: 0 additions & 2 deletions
@@ -7,7 +7,6 @@ package tok

 import (
     "encoding/binary"
-    "fmt"
     "math/big"
     "plugin"
     "strings"
@@ -419,7 +418,6 @@ func (t ExactTokenizer) Tokens(v interface{}) ([]string, error) {
     term = append(term, encodedTerm...)

     t.buffer.Reset()
-    fmt.Println("TOKEN", t.langBase, []byte(term), v)
     return []string{string(term)}, nil
 }
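This change deletes a leftover debug print from ExactTokenizer.Tokens; because that was the file's last use of fmt, Go's unused-import rule forces the import to be dropped as well. If tracing like this were worth keeping, one alternative (not what the commit does) is to gate it behind an opt-in flag; a sketch with a made-up DGRAPH_DEBUG_TOKENS environment variable and traceToken helper:

```go
package main

import (
	"fmt"
	"os"
)

// debugTokens and DGRAPH_DEBUG_TOKENS are invented names for this sketch;
// the commit simply removes the print and the "fmt" import.
var debugTokens = os.Getenv("DGRAPH_DEBUG_TOKENS") != ""

// traceToken mirrors the removed debug line but only fires when the flag is
// set, so it can stay in the code without spamming normal runs.
func traceToken(langBase string, term []byte, v interface{}) {
	if debugTokens {
		fmt.Println("TOKEN", langBase, term, v)
	}
}

func main() {
	traceToken("en", []byte{0x01, 0x02}, "example value")
}
```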

types/locked_sharded_map.go

Lines changed: 3 additions & 4 deletions
@@ -113,9 +113,9 @@ func (s *LockedShardedMap[K, V]) ParallelIterate(f func(K, V) error) error {
     }

     var (
-        wg sync.WaitGroup
-        errCh = make(chan error, 1)
-        once sync.Once
+        wg    sync.WaitGroup
+        errCh = make(chan error, 1)
+        once  sync.Once
     )

     for i := range s.shards {
@@ -152,7 +152,6 @@
         }
     }

-
 func (s *LockedShardedMap[K, V]) Iterate(f func(K, V) error) error {
     if s == nil {
         return nil
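The realigned var block belongs to ParallelIterate, which fans out one goroutine per shard and reports only the first callback error via a sync.Once guarding a one-slot channel. A simplified, self-contained sketch of that error-propagation pattern, using a plain [][]int in place of the real generic shard type:

```go
package main

import (
	"fmt"
	"sync"
)

// parallelIterate visits every element of each shard in its own goroutine
// and returns only the first error any callback produced.
func parallelIterate(shards [][]int, f func(int) error) error {
	var (
		wg    sync.WaitGroup
		errCh = make(chan error, 1) // one slot: holds the first error only
		once  sync.Once
	)

	for i := range shards {
		wg.Add(1)
		go func(shard []int) {
			defer wg.Done()
			for _, v := range shard {
				if err := f(v); err != nil {
					// Record the first error; later ones are dropped.
					once.Do(func() { errCh <- err })
					return
				}
			}
		}(shards[i])
	}

	wg.Wait()

	select {
	case err := <-errCh:
		return err
	default:
		return nil
	}
}

func main() {
	shards := [][]int{{1, 2}, {3, 4}, {5}}
	err := parallelIterate(shards, func(v int) error {
		if v == 4 {
			return fmt.Errorf("bad value %d", v)
		}
		return nil
	})
	fmt.Println("first error:", err)
}
```

The buffered channel plus sync.Once means a failing goroutine never blocks: exactly one error is ever sent, and the rest return immediately.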

worker/mutation_unit_test.go

Lines changed: 1 addition & 3 deletions
@@ -90,23 +90,21 @@ func TestReverseEdgeSetDel(t *testing.T) {
         Entity: 1,
         Op:     pb.DirectedEdge_SET,
     }
-
+
     edgeSet2 := &pb.DirectedEdge{
         ValueId: 2,
         Attr:    attr,
         Entity:  3,
         Op:      pb.DirectedEdge_SET,
     }

-
     edgeSet3 := &pb.DirectedEdge{
         ValueId: 2,
         Attr:    attr,
         Entity:  4,
         Op:      pb.DirectedEdge_SET,
     }

-
     x.Check(newRunMutation(ctx, edgeSet1, txn))
     x.Check(newRunMutation(ctx, edgeSet2, txn))
     x.Check(newRunMutation(ctx, edgeSet3, txn))
