Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 5 additions & 1 deletion defi/l2/adapters/thirdParty.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ import PromisePool from "@supercharge/promise-pool";
let bridgePromises: { [bridge: string]: Promise<any> } = {};
const addresses: { [chain: Chain]: Address[] } = {};
allChainKeys.map((c: string) => (addresses[c] = []));
let doneAdapters: string[] = [];
let doneAdapters: string[] = []; // TODO: no, this is not how you track whether a function is called, because of the async nature of the calls; you need to use lodash.once or similar
let mappingDone: boolean = false;

const chainMap: { [chain: string]: string } = {
Expand Down Expand Up @@ -222,6 +222,8 @@ const adapters = { axelar, wormhole, celer, hyperlane, layerzero, flow, unit };
const filteredAddresses: { [chain: Chain]: Address[] } = {};

const tokenAddresses = async (): Promise<{ [chain: Chain]: Address[] }> => {
//QUESTION: how are the errors here handled?
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Right now they're handled downstream.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

unless you are checking for error objects in promisePool, they get ignored

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

that is something I hate about promisePool, if you use our sdk one, it throws error: https://github.com/DefiLlama/defillama-sdk/blob/more-chainApi-methods/src/util/promisePool.ts#L5

// TODO: you were right, now the code calls the same adapter list each time for each chain, need to use lodash.once or similar to ensure each adapter is called only once/we pull the list only once
await PromisePool.withConcurrency(5)
.for(Object.entries(adapters))
.process(async ([key, adapter]: any) => {
Expand All @@ -234,6 +236,8 @@ const tokenAddresses = async (): Promise<{ [chain: Chain]: Address[] }> => {

if (Object.keys(adapters).length == doneAdapters.length && mappingDone) return filteredAddresses;


// TODO: please add comments explaining the logic here
Object.keys(addresses).map((chain: string) => {
let chainAddresses =
chain in excluded ? addresses[chain].filter((t: string) => !excluded[chain].includes(t)) : addresses[chain];
Expand Down
9 changes: 8 additions & 1 deletion defi/l2/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,9 @@ async function restCallWrapper(request: () => Promise<any>, retries: number = 8,
}
throw new Error(`couldnt work ${name} call after retries!`);
}

// TODO: I am very unhappy that we keep repeating the price fetch code everywhere, it is there in coins part of the repo, here, in tvl repo, in dimensions, in defi/storeTvl, can you check if the one in sdk is adequate/else update it to something that can be used all these places
// I am also thinking of adding a pull all method, which will pull all token prices from ES, so we get all the prices in a single call for current timestamp
export async function getPrices(
readKeys: string[],
timestamp: number | "now"
Expand Down Expand Up @@ -120,6 +123,8 @@ export async function getPrices(

return aggregatedRes;
}

// TODO: same point as for getPrices above
export async function getMcaps(
readKeys: string[],
timestamp: number | "now"
Expand Down Expand Up @@ -379,10 +384,12 @@ export async function fetchSupplies(
throw new Error(`multicalling token supplies failed for chain ${chain} with ${e}`);
}
}

// TODO: please add comments explaining the logic here
export async function fetchBridgeTokenList(chain: Chain): Promise<Address[]> {
const j = Object.keys(incomingAssets).indexOf(chain);
try {
const tokens: Address[] = j == -1 ? [] : await Object.values(incomingAssets)[j]();
const tokens: Address[] = j == -1 ? [] : await Object.values(incomingAssets)[j](); // QUESTION: why not check incomingAssets[chain] directly? what's the rationale behind using indexOf and Object.values?
tokens.push(...((await fetchThirdPartyTokenList())[chain] ?? []));
let filteredTokens: Address[] =
chain in excluded ? tokens.filter((t: string) => !excluded[chain].includes(t)) : tokens;
Expand Down
32 changes: 27 additions & 5 deletions defi/l2/v2/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ import { additional, excluded } from "../adapters/manual";
import { storeHistoricalToDB } from "./storeToDb";
import { stablecoins } from "../../src/getProtocols";
import { metadata as rwaMetadata } from "../../src/rwa/protocols";
import { verifyChanges } from "../test";
import { verifyChanges } from "../test"; // TODO: rename this file, misleading name

const searchWidth = 10800; // 3hr

Expand Down Expand Up @@ -47,18 +47,19 @@ async function fetchNativeAndMcaps(
const allPrices: { [token: string]: CoinsApiData } = {};
const allMcaps: { [token: string]: McapsApiData } = {};

// QUESTION: how are errors here handled?
await PromisePool.withConcurrency(5)
.for(allChainKeys)
.process(async (chain: Chain) => {
if (Object.values(protocolBridgeIds).includes(chain)) return;
await withTimeout(1000 * 60 * (override ? 120 : 120), minted(chain)).catch(() => {
if (Object.values(protocolBridgeIds).includes(chain)) return; // TODO: why, can you add a comment here?
await withTimeout(1000 * 60 * (override ? 120 : 120), minted(chain)).catch(() => { // QUESTION: why is the timeout so high? 2 hours? and why have the override condition and not use it?
throw new Error(`fetchMinted() timed out for ${chain}`);
});

async function minted(chain: Chain) {
try {
const start = new Date().getTime();
let storedTokens = await fetchAllTokens(chain);
let storedTokens = await fetchAllTokens(chain); // TODO: if cardano has special fetch, why do generic fetch for it here?

if (chain == "cardano") storedTokens = await fetchAdaTokens();

Expand All @@ -69,8 +70,11 @@ async function fetchNativeAndMcaps(

let prices: { [token: string]: CoinsApiData } = {};
try {
prices = await getR2JSONString(`prices/${chain}.json`);
// QUESTION: how often is this script run? if hourly we might have to find a different solution for files here
// isn't this a lot of writes?
prices = await getR2JSONString(`prices/${chain}.json`);
} catch (e) {
// QUESTION: hmm, what does happen if the price is already cached? are you deleting old prices somewhere else in the code?
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

this is just for debug while the work is in dev

Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

same with the other R2s below, they're cached for ages but it speeds up the process significantly for debug

console.log(`${chain} prices not cached, fetching`);
prices = await getPrices(
storedTokens.map((t: string) => normalizeKey(t.startsWith("coingecko:") ? t : `${chain}:${t}`)),
Expand All @@ -88,6 +92,7 @@ async function fetchNativeAndMcaps(
try {
mcaps = await getR2JSONString(`mcaps/${chain}.json`);
} catch (e) {
// QUESTION: hmm, what does happen if the mcap is already cached? are you deleting old mcaps somewhere else in the code?
console.log(`${chain} mcaps not cached, fetching`);
mcaps = await getMcaps(Object.keys(prices), timestamp);
await storeR2JSONString(`mcaps/${chain}.json`, JSON.stringify(mcaps));
Expand Down Expand Up @@ -127,9 +132,11 @@ async function fetchNativeAndMcaps(
return { chainData, allPrices, allMcaps };
}

// TODO: please add comments explaining the logic, else it is impossible to follow
async function fetchIncomingAssetsList(): Promise<{ [chain: Chain]: string[] }> {
const incomingAssets: { [chain: Chain]: string[] } = {};

// QUESTION: how are errors here handled?
await PromisePool.withConcurrency(5)
.for(allChainKeys)
.process(async (chain: Chain) => {
Expand Down Expand Up @@ -161,6 +168,8 @@ async function fetchOutgoingAmountsFromDB(timestamp: number): Promise<{
destinationChainAmounts: { [chain: Chain]: { [token: string]: BigNumber } };
}> {
const ids: string[] = [...Object.keys(canonicalBridgeIds), ...Object.keys(protocolBridgeIds)];
// TODO: use promisepool here
// QUESTION: why is data being pulled from ddb instead of postgres? especially for latest protocol data, it can be fetched for all the ids with a single query
const tvls: any[] = await Promise.all(
ids.map((i: string) =>
getTVLOfRecordClosestToTimestamp(
Expand All @@ -174,6 +183,8 @@ async function fetchOutgoingAmountsFromDB(timestamp: number): Promise<{
const sourceChainAmounts: { [chain: Chain]: { [token: string]: BigNumber } } = {};
const protocolAmounts: { [chain: Chain]: { [token: string]: BigNumber } } = {};
const destinationChainAmounts: { [chain: Chain]: { [token: string]: BigNumber } } = {};

// TODO: add comments here or at the top of the function explaining the logic, so it will be easier for future llamas to understand
tvls.map((b: any, i: number) => {
Object.keys(b).map((chain: string) => {
if (excludedTvlKeys.includes(chain)) return;
Expand Down Expand Up @@ -205,6 +216,7 @@ async function fetchOutgoingAmountsFromDB(timestamp: number): Promise<{
return { sourceChainAmounts, protocolAmounts, destinationChainAmounts };
}

// TODO: please add comments explaining the logic here
async function fetchExcludedAmounts(timestamp: number) {
const excludedTokensAndOwners: { [chain: string]: [string, string][] } = {
base: [
Expand All @@ -224,6 +236,8 @@ async function fetchExcludedAmounts(timestamp: number) {
};

const excludedAmounts: { [chain: string]: { [token: string]: BigNumber } } = {};

// TODO use PromisePool here to be future proof, again errors are not handled here, a bug here will fail the whole run
await Promise.all(
Object.keys(excludedTokensAndOwners).map(async (chain: string) => {
const block = await getBlock(chain, (timestamp == 0 ? getCurrentUnixTimestamp() : timestamp) - 10);
Expand All @@ -235,6 +249,8 @@ async function fetchExcludedAmounts(timestamp: number) {
block: block.number,
});

// QUESTION: should you start creating sdk.Balances object and start using it here? instead of directly using BigNumber // this is a suggestion, might make the code easier to read
// single balances object can have all the tokens of all chains, easier to pass around and subtract as well, it is inbuilt
excludedTokensAndOwners[chain].map(([token, _], i) => {
if (!excludedAmounts[chain]) excludedAmounts[chain] = {};
if (!excludedAmounts[chain][token]) excludedAmounts[chain][token] = zero;
Expand All @@ -247,13 +263,16 @@ async function fetchExcludedAmounts(timestamp: number) {
}

async function fetchStablecoinSymbols() {
// TODO: failure is not handled here
const { peggedAssets } = await fetch("https://stablecoins.llama.fi/stablecoins").then((r) => r.json());
const symbols = peggedAssets.map((s: any) => s.symbol);
const allSymbols = [...new Set([...symbols, ...stablecoins].map((t) => t.toUpperCase()))];
return allSymbols;
}

async function fetchLstSymbols() {
// TODO: failure is not handled here
// what I would do for these two cases is, use a cached fetch, so if the fetch fails, we read from the local cache, so we are fine even if the api goes down
const assets = await fetch("https://yields.llama.fi/lsdRates").then((r) => r.json());
const symbols = assets.map((s: any) => s.symbol.toUpperCase());
return symbols;
Expand Down Expand Up @@ -307,6 +326,8 @@ async function main() {
const rwaSymbols = fetchRwaSymbols();

const nativeDataAfterDeductions: { [chain: Chain]: { [token: string]: BigNumber } } = {};

// TODO: please add comment for each block of code explaining the logic, so future llamas can understand, it would help you as well
allChainKeys.map((chain: Chain) => {
if (Object.values(protocolBridgeIds).includes(chain)) return;
nativeDataAfterDeductions[chain] = {};
Expand Down Expand Up @@ -354,6 +375,7 @@ async function main() {

const rawData: FinalData = {};
allChainKeys.map((chain: Chain) => {
// TODO: this structure is repeated multiple times in the code, can you create a function for it
rawData[chain] = {
canonical: { breakdown: {} },
thirdParty: { breakdown: {} },
Expand Down
1 change: 1 addition & 0 deletions defi/src/utils/shared/bridgedTvlPostgres.ts
Original file line number Diff line number Diff line change
Expand Up @@ -85,6 +85,7 @@ export async function storeNotTokens(tokens: string[]) {

export async function fetchAllTokens(chain: Chain): Promise<string[]> {
const sql = await getPgConnection();
// QUESTION: how difficult would it be to replace this with ES, also have no clue on alltokens schema, same for nottokens
const res = await queryPostgresWithRetry(
sql`
select token from alltokens
Expand Down