Skip to content

Commit 5fae0f9

Browse files
committed
Hex strings to byte slices/arrays
Redesigned to have byte slices/arrays instead of strings for almost all relevant fields
1 parent 02e3b3e commit 5fae0f9

29 files changed

+206
-255
lines changed

go.mod

Lines changed: 19 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -1,37 +1,39 @@
11
module github.com/setavenger/blindbit-oracle
22

3-
go 1.20
3+
go 1.24.1
44

55
require (
6-
github.com/btcsuite/btcd/btcec/v2 v2.3.3
7-
github.com/btcsuite/btcd/btcutil v1.1.5
6+
github.com/btcsuite/btcd/btcutil v1.1.6
87
github.com/btcsuite/btcd/chaincfg/chainhash v1.1.0
98
github.com/btcsuite/btcutil v0.0.0-20190425235716-9e5f4b9a998d
109
github.com/gin-contrib/cors v1.7.2
11-
github.com/gin-gonic/gin v1.9.1
12-
github.com/shopspring/decimal v1.3.1
13-
github.com/spf13/viper v1.18.2
10+
github.com/gin-contrib/gzip v1.2.2
11+
github.com/gin-gonic/gin v1.10.0
12+
github.com/rs/zerolog v1.34.0
13+
github.com/setavenger/blindbit-lib v0.0.0-20250807130019-f9642edb8c97
14+
github.com/setavenger/go-bip352 v0.1.8-0.20250807125845-136879952399
15+
github.com/setavenger/go-libsecp256k1 v0.0.0
16+
github.com/spf13/viper v1.19.0
1417
github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7
15-
golang.org/x/crypto v0.22.0
1618
)
1719

1820
require (
1921
github.com/aead/siphash v1.0.1 // indirect
20-
github.com/btcsuite/btcd v0.23.5-0.20231215221805-96c9fd8078fd // indirect
22+
github.com/btcsuite/btcd v0.24.2 // indirect
23+
github.com/btcsuite/btcd/btcec/v2 v2.3.5 // indirect
2124
github.com/btcsuite/btclog v0.0.0-20170628155309-84c8d2346e9f // indirect
2225
github.com/bytedance/sonic v1.11.6 // indirect
2326
github.com/bytedance/sonic/loader v0.1.1 // indirect
2427
github.com/cloudwego/base64x v0.1.4 // indirect
25-
github.com/cloudwego/iasm v0.2.0 // indirect
26-
github.com/decred/dcrd/crypto/blake256 v1.0.0 // indirect
27-
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.1 // indirect
28+
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
29+
github.com/decred/dcrd/crypto/blake256 v1.1.0 // indirect
30+
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0 // indirect
2831
github.com/fsnotify/fsnotify v1.7.0 // indirect
2932
github.com/gabriel-vasile/mimetype v1.4.3 // indirect
3033
github.com/gin-contrib/sse v0.1.0 // indirect
3134
github.com/go-playground/locales v0.14.1 // indirect
3235
github.com/go-playground/universal-translator v0.18.1 // indirect
3336
github.com/go-playground/validator/v10 v10.24.0 // indirect
34-
github.com/gocarina/gocsv v0.0.0-20240520201108-78e41c74b4b1 // indirect
3537
github.com/goccy/go-json v0.10.4 // indirect
3638
github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb // indirect
3739
github.com/hashicorp/hcl v1.0.0 // indirect
@@ -44,16 +46,19 @@ require (
4446
github.com/mitchellh/mapstructure v1.5.0 // indirect
4547
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
4648
github.com/modern-go/reflect2 v1.0.2 // indirect
47-
github.com/pelletier/go-toml/v2 v2.2.1 // indirect
49+
github.com/pelletier/go-toml/v2 v2.2.3 // indirect
50+
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
4851
github.com/sagikazarmark/locafero v0.4.0 // indirect
4952
github.com/sagikazarmark/slog-shim v0.1.0 // indirect
50-
github.com/setavenger/go-bip352 v0.1.6 // indirect
53+
github.com/shopspring/decimal v1.4.0 // indirect
5154
github.com/sourcegraph/conc v0.3.0 // indirect
5255
github.com/spf13/afero v1.11.0 // indirect
5356
github.com/spf13/cast v1.6.0 // indirect
5457
github.com/spf13/pflag v1.0.5 // indirect
58+
github.com/stretchr/testify v1.10.0 // indirect
5559
github.com/subosito/gotenv v1.6.0 // indirect
5660
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
61+
github.com/tyler-smith/go-bip39 v1.1.0 // indirect
5762
github.com/ugorji/go/codec v1.2.12 // indirect
5863
go.uber.org/atomic v1.9.0 // indirect
5964
go.uber.org/multierr v1.9.0 // indirect

internal/core/block_test.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@ func TestBlockAnalysis(t *testing.T) {
2323
}
2424

2525
for _, tweak := range tweaks {
26-
logging.L.Info().Hex("tweak", tweak.TweakData[:]).Str("txid", tweak.Txid).Msg("tweak")
26+
logging.L.Info().Hex("tweak", tweak.TweakData[:]).Hex("txid", tweak.Txid[:]).Msg("tweak")
2727
}
2828

2929
for _, tx := range block.Txs {

internal/core/cfilter.go

Lines changed: 15 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
package core
22

33
import (
4-
"bytes"
54
"encoding/binary"
65
"encoding/hex"
76

@@ -61,26 +60,27 @@ func BuildNewUTXOsFilter(block *types.Block) (types.Filter, error) {
6160
return types.Filter{}, err
6261
}
6362

63+
blockhashBytes, err := hex.DecodeString(block.Hash)
64+
if err != nil {
65+
logging.L.Fatal().Err(err).Str("blockhash", block.Hash).Msg("Failed to decode block hash")
66+
return types.Filter{}, err
67+
}
68+
6469
return types.Filter{
6570
FilterType: 4,
6671
BlockHeight: block.Height,
6772
Data: nBytes,
68-
BlockHash: block.Hash,
73+
BlockHash: [32]byte(blockhashBytes),
6974
}, nil
7075
}
7176

7277
// BuildSpentUTXOsFilter creates a filter based on the spent
7378
func BuildSpentUTXOsFilter(spentOutpointsIndex types.SpentOutpointsIndex) (types.Filter, error) {
74-
blockHashBytes, err := hex.DecodeString(spentOutpointsIndex.BlockHash)
75-
if err != nil {
76-
logging.L.Fatal().Err(err).Str("blockhash", spentOutpointsIndex.BlockHash).Msg("Failed to decode block hash")
77-
return types.Filter{}, err
78-
}
7979
c := chainhash.Hash{}
8080

81-
err = c.SetBytes(utils.ReverseBytes(blockHashBytes))
81+
err := c.SetBytes(utils.ReverseBytes(spentOutpointsIndex.BlockHash[:]))
8282
if err != nil {
83-
logging.L.Fatal().Err(err).Str("blockhash", spentOutpointsIndex.BlockHash).Msg("Failed to set block hash")
83+
logging.L.Fatal().Err(err).Hex("blockhash", spentOutpointsIndex.BlockHash[:]).Msg("Failed to set block hash")
8484
return types.Filter{}, err
8585

8686
}
@@ -96,13 +96,13 @@ func BuildSpentUTXOsFilter(spentOutpointsIndex types.SpentOutpointsIndex) (types
9696

9797
filter, err := gcs.BuildGCSFilter(builder.DefaultP, builder.DefaultM, key, data)
9898
if err != nil {
99-
logging.L.Fatal().Err(err).Str("blockhash", spentOutpointsIndex.BlockHash).Msg("Failed to build GCS filter")
99+
logging.L.Fatal().Err(err).Hex("blockhash", spentOutpointsIndex.BlockHash[:]).Msg("Failed to build GCS filter")
100100
return types.Filter{}, err
101101
}
102102

103103
nBytes, err := filter.NBytes()
104104
if err != nil {
105-
logging.L.Fatal().Err(err).Str("blockhash", spentOutpointsIndex.BlockHash).Msg("Failed to get NBytes")
105+
logging.L.Fatal().Err(err).Hex("blockhash", spentOutpointsIndex.BlockHash[:]).Msg("Failed to get NBytes")
106106
return types.Filter{}, err
107107
}
108108

@@ -115,17 +115,10 @@ func BuildSpentUTXOsFilter(spentOutpointsIndex types.SpentOutpointsIndex) (types
115115
}
116116

117117
func SerialiseToOutpoint(utxo types.UTXO) ([]byte, error) {
118-
var buf bytes.Buffer
119-
120-
txidBytes, err := hex.DecodeString(utxo.Txid)
121-
if err != nil {
122-
logging.L.Fatal().Err(err).Str("txid", utxo.Txid).Msg("Failed to decode txid")
123-
return nil, err
124-
}
118+
out := make([]byte, 32+4)
125119

126-
// err is always nil
127-
buf.Write(utils.ReverseBytes(txidBytes))
120+
copy(out[:32], utils.ReverseBytesCopy(utxo.Txid[:]))
121+
binary.LittleEndian.PutUint32(out[32:], utxo.Vout)
128122

129-
binary.Write(&buf, binary.LittleEndian, utxo.Vout)
130-
return buf.Bytes(), err
123+
return out, nil
131124
}

internal/core/cleanup.go

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@ func overwriteUTXOsWithLookUp(utxos []types.UTXO) error {
2828
utxosToOverwrite = append(utxosToOverwrite, &utxo)
2929
}
3030
err := dblevel.InsertUTXOs(utxosToOverwrite)
31-
alreadyCheckedTxids := make(map[string]struct{})
31+
alreadyCheckedTxids := make(map[[32]byte]struct{})
3232
for _, utxo := range utxosToOverwrite {
3333
if _, ok := alreadyCheckedTxids[utxo.Txid]; ok {
3434
continue
@@ -90,7 +90,7 @@ func markSpentUTXOsAndTweaks(utxos []types.UTXO) error {
9090

9191
// we only need to check for one utxo per txid, so we reduce the number of utxos -> fewer lookups in DB
9292
var cleanUTXOs []types.UTXO
93-
includedTxids := make(map[string]bool)
93+
includedTxids := make(map[[32]byte]bool)
9494

9595
for _, utxo := range utxos {
9696
if _, exists := includedTxids[utxo.Txid]; !exists {

internal/core/extractutxos.go

Lines changed: 15 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,8 @@
11
package core
22

33
import (
4+
"encoding/hex"
5+
46
"github.com/setavenger/blindbit-lib/logging"
57
"github.com/setavenger/blindbit-lib/utils"
68

@@ -19,14 +21,18 @@ func ExtractNewUTXOs(block *types.Block, eligible map[string]struct{}) []*types.
1921
}
2022
for _, vout := range tx.Vout {
2123
if vout.ScriptPubKey.Type == "witness_v1_taproot" {
24+
// we use the fix sized conversion below with a panic
25+
txidBytes, _ := hex.DecodeString(tx.Txid)
26+
blockHashBytes, _ := hex.DecodeString(block.Hash)
27+
2228
value := utils.ConvertFloatBTCtoSats(vout.Value)
2329
utxos = append(utxos, &types.UTXO{
24-
Txid: tx.Txid,
30+
Txid: utils.ConvertToFixedLength32(txidBytes),
2531
Vout: vout.N,
2632
Value: value,
2733
ScriptPubKey: vout.ScriptPubKey.Hex,
2834
BlockHeight: block.Height,
29-
BlockHash: block.Hash,
35+
BlockHash: utils.ConvertToFixedLength32(blockHashBytes),
3036
Timestamp: block.Timestamp,
3137
Spent: value == 0, // Mark as spent if value is 0
3238
})
@@ -72,15 +78,19 @@ func extractSpentTaprootPubKeysFromTx(tx *types.Transaction, block *types.Block)
7278
logging.L.Panic().Err(err).Msg("Headers not synced from first taproot like occurrence. Either build complete index or fully sync headers only.")
7379
return nil
7480
}
75-
blockHash = headerInv.Hash
81+
blockHash = hex.EncodeToString(headerInv.Hash[:])
7682
}
7783

84+
// we use the fix sized conversion below with a panic
85+
txidBytes, _ := hex.DecodeString(vin.Txid)
86+
blockHashBytes, _ := hex.DecodeString(blockHash)
87+
7888
spentUTXOs = append(spentUTXOs, types.UTXO{
79-
Txid: vin.Txid,
89+
Txid: utils.ConvertToFixedLength32(txidBytes),
8090
Vout: vin.Vout,
8191
Value: utils.ConvertFloatBTCtoSats(vin.Prevout.Value),
8292
ScriptPubKey: vin.Prevout.ScriptPubKey.Hex,
83-
BlockHash: blockHash,
93+
BlockHash: utils.ConvertToFixedLength32(blockHashBytes),
8494
Spent: true,
8595
})
8696
} else {

internal/core/routine.go

Lines changed: 21 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,13 @@
11
package core
22

33
import (
4+
"encoding/hex"
45
"errors"
56
"fmt"
67
"time"
78

89
"github.com/setavenger/blindbit-lib/logging"
10+
"github.com/setavenger/blindbit-lib/utils"
911
"github.com/setavenger/blindbit-oracle/internal/config"
1012
"github.com/setavenger/blindbit-oracle/internal/dblevel"
1113
"github.com/setavenger/blindbit-oracle/internal/types"
@@ -62,7 +64,12 @@ func PullBlock(blockHash string) (*types.Block, error) {
6264
return nil, fmt.Errorf("block_hash invalid: %s", blockHash)
6365
}
6466
// this method is preferred over lastHeader because then this function can be called for PreviousBlockHash
65-
header, err := dblevel.FetchByBlockHashBlockHeader(blockHash)
67+
hashByteSlice, err := hex.DecodeString(blockHash)
68+
if err != nil {
69+
logging.L.Err(err).Msg("failed to hex decode blockhash")
70+
return nil, err
71+
}
72+
header, err := dblevel.FetchByBlockHashBlockHeader(utils.ConvertToFixedLength32(hashByteSlice))
6673
if err != nil && !errors.Is(err, dblevel.NoEntryErr{}) {
6774
// we ignore no entry error
6875
logging.L.Err(err).Msg("error fetching block header")
@@ -83,6 +90,7 @@ func PullBlock(blockHash string) (*types.Block, error) {
8390
}
8491

8592
// CheckBlock checks whether the block hash has already been processed and will process the block if needed
93+
// todo: needs to throw an error
8694
func CheckBlock(block *types.Block) {
8795
// todo add return type error
8896
// todo this should fail at the highest instance were its wrapped in,
@@ -116,8 +124,13 @@ func CheckBlock(block *types.Block) {
116124
return
117125
}
118126

127+
hashByteSlice, err := hex.DecodeString(block.Hash)
128+
if err != nil {
129+
logging.L.Err(err).Msg("could not decode blockhash hex")
130+
return
131+
}
119132
err = dblevel.InsertBlockHeaderInv(types.BlockHeaderInv{
120-
Hash: block.Hash,
133+
Hash: utils.ConvertToFixedLength32(hashByteSlice),
121134
Height: block.Height,
122135
Flag: true,
123136
})
@@ -152,7 +165,7 @@ func HandleBlock(block *types.Block) error {
152165
// build map for sorting
153166
tweaksForBlockMap := map[string]types.Tweak{}
154167
for _, tweak := range tweaksForBlock {
155-
tweaksForBlockMap[tweak.Txid] = tweak
168+
tweaksForBlockMap[hex.EncodeToString(tweak.Txid[:])] = tweak
156169
}
157170

158171
// we only create one of the two filters no dust can be derived from dust but not vice versa
@@ -161,7 +174,8 @@ func HandleBlock(block *types.Block) error {
161174
// full index with dust filter possibility
162175
// todo should we sort, overhead created
163176
tweakIndexDust := types.TweakIndexDustFromTweakArray(tweaksForBlockMap, block)
164-
tweakIndexDust.BlockHash = block.Hash
177+
blockHashBytes, _ := hex.DecodeString(block.Hash)
178+
tweakIndexDust.BlockHash = utils.ConvertToFixedLength32(blockHashBytes)
165179
tweakIndexDust.BlockHeight = block.Height
166180

167181
err = dblevel.InsertTweakIndexDust(tweakIndexDust)
@@ -173,7 +187,8 @@ func HandleBlock(block *types.Block) error {
173187
// normal full index no dust
174188
// todo should we sort, overhead created
175189
tweakIndex := types.TweakIndexFromTweakArray(tweaksForBlockMap, block)
176-
tweakIndex.BlockHash = block.Hash
190+
blockHashBytes, _ := hex.DecodeString(block.Hash)
191+
tweakIndex.BlockHash = utils.ConvertToFixedLength32(blockHashBytes)
177192
tweakIndex.BlockHeight = block.Height
178193
err = dblevel.InsertTweakIndex(tweakIndex)
179194
if err != nil {
@@ -199,7 +214,7 @@ func HandleBlock(block *types.Block) error {
199214
// mark all transaction which have eligible outputs
200215
eligibleTransaction := map[string]struct{}{}
201216
for _, tweak := range tweaksForBlock {
202-
eligibleTransaction[tweak.Txid] = struct{}{}
217+
eligibleTransaction[hex.EncodeToString(tweak.Txid[:])] = struct{}{}
203218
}
204219

205220
// first we need to get the new outputs because some of them might/will be spent in the same block

internal/core/spentutxos.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ func BuildSpentUTXOIndex(utxos []types.UTXO, block *types.Block) (types.SpentOut
2121
blockHashBytes = utils.ReverseBytes(blockHashBytes)
2222

2323
spentOutpointsIndex := types.SpentOutpointsIndex{
24-
BlockHash: block.Hash,
24+
BlockHash: utils.ConvertToFixedLength32(blockHashBytes),
2525
BlockHeight: block.Height,
2626
}
2727

internal/core/sync.go

Lines changed: 11 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,13 @@
11
package core
22

33
import (
4+
"encoding/hex"
45
"errors"
56
"sort"
67
"sync"
78

89
"github.com/setavenger/blindbit-lib/logging"
10+
"github.com/setavenger/blindbit-lib/utils"
911
"github.com/setavenger/blindbit-oracle/internal/config"
1012
"github.com/setavenger/blindbit-oracle/internal/dblevel"
1113
"github.com/setavenger/blindbit-oracle/internal/types"
@@ -288,8 +290,16 @@ func PreSyncHeaders() error {
288290
// convert BlockHeaders to BlockerHeadersInv
289291
var headersInv []types.BlockHeaderInv
290292
for _, header := range headers {
293+
blockHashSlice, err := hex.DecodeString(header.Hash)
294+
if err != nil {
295+
// todo: remove all hex.Decode or hex.Encode code
296+
// there are only very few places where this actually needed
297+
logging.L.Err(err).Msg("blockhash could not be hex decoded")
298+
return err
299+
}
300+
291301
headersInv = append(headersInv, types.BlockHeaderInv{
292-
Hash: header.Hash,
302+
Hash: utils.ConvertToFixedLength32(blockHashSlice),
293303
Height: header.Height,
294304
Flag: false,
295305
})

0 commit comments

Comments (0)