Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
66 changes: 46 additions & 20 deletions modules/foundation.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,52 @@ var SqlAsync = require('./sql_async.js')
var SqlComposer = require("./sql_composer.js")
var Commons = require('./commons.js')

function hashAll(...args) {
  // Generic BLAKE2b-256 helper: hashes every argument (a byte buffer)
  // together, in order, and returns the 32-byte digest as a Uint8Array.
  // Used to derive Sia output IDs from their binary components.
  const hasher = blake.blake2bInit(32, null);

  // Feed each chunk into the incremental hasher in argument order.
  for (const chunk of args) {
    blake.blake2bUpdate(hasher, chunk);
  }

  // Finalize and return the digest.
  return blake.blake2bFinal(hasher);
}

function newSpecifier(str) {
  // Converts a specifier string into Sia's fixed 16-byte binary form:
  // the ASCII bytes of the string, zero-padded on the right.
  const buf = Buffer.alloc(16);
  // Pass 'ascii' directly as the encoding argument. The previous form,
  // `buf.write(str, encoding='ascii')`, was an assignment expression that
  // leaked an implicit global `encoding` and throws in strict mode.
  buf.write(str, 'ascii');
  return buf;
}

function encodeUint64(n) {
  // Serializes an integer as 8 little-endian bytes, matching Sia's
  // binary encoding of uint64 values.
  // BigInt() accepts plain numbers as well as existing BigInts, so
  // callers may pass either.
  const out = Buffer.alloc(8);
  out.writeBigUInt64LE(BigInt(n), 0);
  return out;
}

// Precomputed 16-byte "storage proof" specifier, used when deriving
// file-contract payout output IDs in CalculateContractPayoutID.
const specifierStorageProof = newSpecifier("storage proof");

// This function takes the hash of a block and the index of a miner payout to
// calculate the OutputID of a miner payout.
exports.CalculateMinerOutputID = function(blockHash, i) {
return Buffer.from(hashAll(Buffer.from(blockHash, 'hex'), encodeUint64(i))).toString('hex');
}

exports.CalculateSiafundOutputID = function(siafundOutputHash) {
return Buffer.from(hashAll(Buffer.from(siafundOutputHash, 'hex'))).toString('hex');
}

exports.CalculateContractPayoutID = function(contractHash, validProof, i) {
const contractBuf = Buffer.from(contractHash, 'hex'),
proofBuf = Buffer.alloc(1, validProof ? 1 : 0),
index = encodeUint64(i);

return Buffer.from(hashAll(specifierStorageProof, contractBuf, proofBuf, index)).toString('hex');
}


exports.CalculateOutputIdSubsidy = async function(blockHash, specifier) {
// This function takes the hash of a block together with the specifier of the Sia Foundation and calculates
Expand All @@ -14,26 +60,6 @@ exports.CalculateOutputIdSubsidy = async function(blockHash, specifier) {

var specifierFoundation = newSpecifier(specifier);

function newSpecifier(str) {
// Takes the specifier and transforms it into a buffer
const buf = Buffer.alloc(16);
buf.write(str, encoding='ascii');
return buf;
}

function hashAll(...args) {
// Calculates the subsidy ID by hashing together the block ID bytes and foundation specifier

const hasher = blake.blake2bInit(32, null);

// loop through each of our arguments and update the hasher
for (let i = 0; i < args.length; i++)
blake.blake2bUpdate(hasher, args[i]);

//finalize the hash
return blake.blake2bFinal(hasher);
}

// convert the block ID from its hex encoded string to a Buffer
var blockID = Buffer.from(blockHash, 'hex');
// calculate the subsidy id
Expand Down
2 changes: 1 addition & 1 deletion modules/indexer.js
Original file line number Diff line number Diff line change
Expand Up @@ -281,7 +281,7 @@ async function minerPayoutProcessor(params, sqlBatch, addressesImplicated, heigh
sqlBatch.push(SqlComposer.InsertSql(params, "HashTypes", toAddHashTypes, minerPayoutTxId))

// Add the new output
sqlBatch = await Outputs.MiningPoolPayoutOutput(params, sqlBatch, api, height, payoutAddress)
sqlBatch = await Outputs.MiningPoolPayoutOutput(params, sqlBatch, api, height, payoutAddress, 0)

// Mining pool name
var miningPool = "Unknown" // Default
Expand Down
89 changes: 20 additions & 69 deletions modules/outputs.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
var exports = module.exports={}
var SqlComposer = require("./sql_composer.js")
var Commons = require('./commons.js')
const { CalculateMinerOutputID, CalculateSiafundOutputID } = require('./foundation.js');

exports.Outputs = async function(params, api, sqlBatch) {
// Creates SQL entries for newly created outputs and updates the spent outputs
Expand Down Expand Up @@ -139,86 +140,36 @@ exports.Outputs = async function(params, api, sqlBatch) {
}


exports.SfClaimOutput = async function(params, sqlBatch, senderClaim, senderClaimAddress, txHash, height) {
exports.SfClaimOutput = async function(params, sqlBatch, siafundID, senderClaim, senderClaimAddress, txHash, height) {
// Adds the output created during a SF fees claim
const outputId = CalculateSiafundOutputID(siafundID);

// Push output to the batch: new output and outputID as a hash type
var sqlQuery = SqlComposer.CreateOutput(
params, "Outputs-SC", outputId, BigInt(senderClaim), senderClaimAddress, height)
sqlBatch.push(sqlQuery)

// Currently, the consensus/block API does not indicate in any field which is the outputID of the claimed fees. This used to be on
// siafundclaimoutputids on the explorer API
// As a temporary solution, I use the explorer module. Even if broken, it should be able to provide this info

// Only if the explorer module is available
if (params.explorerAvailable == true) {

// We make this call as a "one try" method (the "true"), as the `/explorer/hashes` API sometimes might fail without any specific reason,
// and it is better just going to the failover solution of getting the full block
var api = await Commons.MegaRouter(params, 0, "/explorer/hashes/" + txHash, true)

try {
var outputId = api.transaction.siafundclaimoutputids[0]
} catch (e) {
// In some cases, the /explorer/hashes endpoint will return a wrong result without the output, if this happens, we instead
// collect the full block info, that is more costly
try {
var apiBlock = await Commons.MegaRouter(params, 0, '/explorer/blocks/' + height)
for (var i = 0; i < apiBlock.block.transactions.length; i++) {
if (apiBlock.block.transactions[i].id == txHash) {
var outputId = apiBlock.block.transactions[i].siafundclaimoutputids[0]
}
}

} catch (e) {
// Stops the script to allow a graceful restart by Forever/PM2 if something unexpected stopped the indexer. As the script runs also the API server and the
// database connector, otherwise the script would keep running
console.log(e)
console.log("*** Forcing the stop of the script in 20 seconds")
await Commons.Delay(20000); // Async timeout
process.exit()
}
}


// Push output to the batch: new output and outputID as a hash type
var sqlQuery = SqlComposer.CreateOutput(
params, "Outputs-SC", outputId, BigInt(senderClaim), senderClaimAddress, height)
sqlBatch.push(sqlQuery)

var toAddHashTypes = "('" + outputId + "','output','')"
var sqlQuery = SqlComposer.InsertSql(params, "HashTypes", toAddHashTypes, outputId)
sqlBatch.push(sqlQuery)
}
var toAddHashTypes = "('" + outputId + "','output','')"
var sqlQuery = SqlComposer.InsertSql(params, "HashTypes", toAddHashTypes, outputId)
sqlBatch.push(sqlQuery)

// Return the updated batch
return sqlBatch
}


exports.MiningPoolPayoutOutput = async function(params, sqlBatch, api, height, payoutAddress) {
exports.MiningPoolPayoutOutput = async function(params, sqlBatch, api, height, payoutAddress, index) {
// Identifies the output recipient of the block reward
const outputId = CalculateMinerOutputID(api.id, index);

// Info currently missing from the consensus/block API. As a temporary solution, this relies on a call to the explorer API until
// Nebulous adds this info to the consensus API. If the user decides not to use the explorer module, this info will not be used

// Only if the explorer module is available
if (params.explorerAvailable == true) {
try {
var apiExplorer = await Commons.MegaRouter(params, 0, '/explorer/blocks/' + height)
var outputId = apiExplorer.block.minerpayoutids[0]

// New output
var sqlQuery = SqlComposer.CreateOutput(
params, "Outputs-SC", outputId, BigInt(api.minerpayouts[0].value), payoutAddress, height)
sqlBatch.push(sqlQuery)

// Output as hash type
var toAddHashTypes = "('" + outputId + "','output','')"
sqlBatch.push(SqlComposer.InsertSql(params, "HashTypes", toAddHashTypes, outputId))

} catch (e) {
console.log("//// Could not retrieve the block from the explorer API for the outputID of the mining pool payout. This output has been skipped")
console.log(e)
}
}
// New output
var sqlQuery = SqlComposer.CreateOutput(
params, "Outputs-SC", outputId, BigInt(api.minerpayouts[0].value), payoutAddress, height)
sqlBatch.push(sqlQuery)

// Output as hash type
var toAddHashTypes = "('" + outputId + "','output','')"
sqlBatch.push(SqlComposer.InsertSql(params, "HashTypes", toAddHashTypes, outputId))
return sqlBatch
}

Expand Down
8 changes: 5 additions & 3 deletions modules/siafunds.js
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ exports.sfTransactionProcess = async function(params, apiblock, n, height, times
var senderClaimAddress = apiblock.transactions[i].rawtransaction.siafundinputs[j].claimunlockhash

// Claim output creation
newSql = await Outputs.SfClaimOutput(params, newSql, BigInt(senderClaim), senderClaimAddress, apiblock.transactions[i].id, apiblock.height)
newSql = await Outputs.SfClaimOutput(params, newSql, senderMatcher, BigInt(senderClaim), senderClaimAddress, apiblock.transactions[i].id, apiblock.height)

// Adding the claim to addressesImplicated with a flag: It is indexed, but we need the info to update the balance of the address
addressesImplicated.push({"hash": senderClaimAddress, "sc": senderClaim, "sf": 0, "txType": "SfClaim"})
Expand Down Expand Up @@ -149,9 +149,10 @@ exports.sfTransactionProcess = async function(params, apiblock, n, height, times
senderClaim = senderClaim * apiblock.transactions[n].siafundinputoutputs[j].value // We multiply it by the number of SF transacted
totalSCtransacted = totalSCtransacted + Math.floor(senderClaim)
var senderClaimAddress = apiblock.transactions[n].rawtransaction.siafundinputs[j].claimunlockhash
const siafundOutputID = apiblock.transactions[n].rawtransaction.siafundinputs[j].parentid

// Claim output creation
newSql = await Outputs.SfClaimOutput(params, newSql, BigInt(senderClaim), senderClaimAddress, apiblock.transactions[n].id, apiblock.height)
newSql = await Outputs.SfClaimOutput(params, newSql, siafundOutputID, BigInt(senderClaim), senderClaimAddress, apiblock.transactions[n].id, apiblock.height)

// Adding the claim to addressesImplicated with a flag: It is indexed, but we need the info to update the balance of the address
addressesImplicated.push({"hash": senderClaimAddress, "sc": senderClaim, "sf": 0, txType: "SfClaim"})
Expand Down Expand Up @@ -278,9 +279,10 @@ exports.sfSingleTransaction = async function(params, apiblock, n, height, timest
senderClaim = senderClaim * apiblock.transactions[n].siafundinputoutputs[j].value // We multiply it by the number of SF transacted
totalSCtransacted = totalSCtransacted + Math.floor(senderClaim)
var senderClaimAddress = apiblock.transactions[n].rawtransaction.siafundinputs[j].claimunlockhash
const siafundOutputID = apiblock.transactions[n].rawtransaction.siafundinputs[j].parentid

// Claim output creation
newSql = await Outputs.SfClaimOutput(params, newSql, BigInt(senderClaim), senderClaimAddress, apiblock.transactions[n].id, apiblock.height)
newSql = await Outputs.SfClaimOutput(params, newSql, siafundOutputID, BigInt(senderClaim), senderClaimAddress, apiblock.transactions[n].id, apiblock.height)

// Adding the claim to addressesImplicated with a flag of txType
addressesImplicated.push({"hash": senderClaimAddress, "sc": senderClaim, "sf": 0, txType: "SfClaim"})
Expand Down
1 change: 1 addition & 0 deletions modules/sql_async.js
Original file line number Diff line number Diff line change
Expand Up @@ -95,6 +95,7 @@ exports.BacthInsert = async function(params, sqlBatch, height, indexing) {
}

} else {
let failedItemizedQueries = 0;
// SQLite does not accept multiple queries on a single message round trip. We always do itemized indexing
// Besides, according to https://sqlite.org/np1queryprob.html this is not an inefficiency issue for SQLite
for (var i = 0; i < sqlBatch.length; i++) {
Expand Down