Merging Pains: Fix token list duplication for good (#3760)
This finally dropped my local (rather large due to certain observed
accounts) redux store from 25MB to 5MB.

Latest build:
[extension-builds-3760](https://github.com/tahowallet/extension/suites/32952801958/artifacts/2418089821)
(as of Sun, 12 Jan 2025 04:47:49 GMT).
Shadowfiend authored Jan 12, 2025
2 parents e8f08e3 + 8c8b5fb commit c6d7391
Showing 21 changed files with 481 additions and 365 deletions.
19 changes: 0 additions & 19 deletions background/lib/asset-similarity.ts
@@ -90,22 +90,3 @@ export function findClosestAssetIndex(

return undefined
}

/**
* Merges the information about two assets. Mostly focused on merging metadata.
*/
export function mergeAssets(asset1: AnyAsset, asset2: AnyAsset): AnyAsset {
return {
...asset1,
...("coinType" in asset1 ? { coinType: asset1.coinType } : {}),
...("coinType" in asset2 ? { coinType: asset2.coinType } : {}),
metadata: {
...asset1.metadata,
...asset2.metadata,
tokenLists:
asset1.metadata?.tokenLists?.concat(
asset2.metadata?.tokenLists ?? [],
) ?? [],
},
}
}
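
For context, this removed helper concatenated `tokenLists` from both assets on every merge with no deduplication, so repeatedly merging the same token list kept appending identical citations. A minimal sketch of that failure mode, using simplified stand-in types rather than the extension's real `AnyAsset` shapes:

```ts
// Simplified stand-in types for illustration only.
type TokenListCitation = { url: string; name?: string; logoURL?: string }
type AssetMetadata = { tokenLists?: TokenListCitation[] }

// Naive merge: concatenates tokenLists without deduplication, as the removed
// helper did.
function naiveMergeMetadata(a: AssetMetadata, b: AssetMetadata): AssetMetadata {
  return {
    tokenLists: (a.tokenLists ?? []).concat(b.tokenLists ?? []),
  }
}

// Merging the same citation repeatedly duplicates it each time; repeated over
// many token list refreshes, this is the growth the commit message describes.
const citation = { url: "https://example.test/uniswap.tokenlist.json" }
let merged: AssetMetadata = { tokenLists: [citation] }
merged = naiveMergeMetadata(merged, { tokenLists: [citation] })
merged = naiveMergeMetadata(merged, { tokenLists: [citation] })
// merged.tokenLists now holds three copies of the same citation.
```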
40 changes: 30 additions & 10 deletions background/lib/gas.ts
@@ -14,6 +14,7 @@ import {
POLYGON,
} from "../constants/networks"
import { gweiToWei } from "./utils"
import { MINUTE } from "../constants"

// We can't use destructuring because webpack has to replace all instances of
// `process.env` variables in the bundled output
@@ -142,26 +143,45 @@ const getLegacyGasPrices = async (
dataSource: "local",
})

let lastSuccessfulBlocknativeAttempt: number = 0
let lastFailedBlocknativeAttempt: number = 0
const BLOCKNATIVE_RETRY_INTERVAL = 10 * MINUTE

export default async function getBlockPrices(
network: EVMNetwork,
provider: Provider,
): Promise<BlockPrices> {
// if BlockNative is configured and we're on mainnet, prefer their gas service
// if BlockNative is configured and we're on mainnet, prefer their gas service.
if (
typeof BLOCKNATIVE_API_KEY !== "undefined" &&
network.chainID === ETHEREUM.chainID
) {
try {
if (!blocknative) {
blocknative = Blocknative.connect(
BLOCKNATIVE_API_KEY,
BlocknativeNetworkIds.ethereum.mainnet,
)
// If the last attempt was a failure and we last succeeded less than
// BLOCKNATIVE_RETRY_INTERVAL ago, leave Blocknative alone and retry later.
if (
lastSuccessfulBlocknativeAttempt > lastFailedBlocknativeAttempt ||
lastFailedBlocknativeAttempt - lastSuccessfulBlocknativeAttempt >
BLOCKNATIVE_RETRY_INTERVAL
) {
try {
if (!blocknative) {
blocknative = Blocknative.connect(
BLOCKNATIVE_API_KEY,
BlocknativeNetworkIds.ethereum.mainnet,
)
}
const prices = await blocknative.getBlockPrices()
lastSuccessfulBlocknativeAttempt = Date.now()

return prices
} catch (err) {
logger.error("Error getting block prices from BlockNative", err)
}
return await blocknative.getBlockPrices()
} catch (err) {
logger.error("Error getting block prices from BlockNative", err)
}

// If we fall through, treat it as a failure and increment the last failure
// timestamp.
lastFailedBlocknativeAttempt = Date.now()
}

const [currentBlock, feeData] = await Promise.all([
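
The new module-level timestamps above gate how often the Blocknative service is retried after failures. Read on its own, the pattern is a simple time-based retry throttle; here is a rough standalone sketch under assumed names (not the extension's actual module, with the 10-minute interval copied from the diff for illustration):

```ts
// Hypothetical standalone version of the throttle pattern shown above.
const RETRY_INTERVAL = 10 * 60 * 1000 // 10 minutes in milliseconds
let lastSuccess = 0
let lastFailure = 0

async function fetchWithThrottle<T>(
  attempt: () => Promise<T>,
  fallback: () => Promise<T>,
): Promise<T> {
  // Only hit the flaky upstream if the most recent attempt succeeded, or if
  // the last failure came long enough after the last success that a fresh
  // retry is warranted.
  if (lastSuccess > lastFailure || lastFailure - lastSuccess > RETRY_INTERVAL) {
    try {
      const result = await attempt()
      lastSuccess = Date.now()
      return result
    } catch {
      // Fall through to the fallback and record the failure below.
    }
    lastFailure = Date.now()
  }
  return fallback()
}
```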
105 changes: 89 additions & 16 deletions background/lib/logger.ts
@@ -1,15 +1,31 @@
import browser from "webextension-polyfill"

// Ignore the no-console lint rule since this file is meant to funnel output to
// the console.
/* eslint-disable no-console */

const HOUR = 1000 * 60 * 60

const store = {
get(key: string): string {
return "" // return window.localStorage.getItem(key) ?? ""
async get(key: string): Promise<string> {
const realKey = `logs-${key}`
return String((await browser.storage.local.get(realKey))[realKey]) ?? ""
},
async getAll<T extends string[], Keys extends T[number]>(
...keys: Keys[]
): Promise<{ [key in Keys]: string }> {
const keyResults = await browser.storage.local.get(
keys.map((key) => `logs-${key}`),
)
return Object.fromEntries(
keys.map<[Keys, string]>((key) => [
key,
String(keyResults[`logs-${key}`]) ?? "",
]),
) as { [key in Keys]: string }
},
set(key: string, value: string): void {
// window.localStorage.setItem(key, value)
async set(key: string, value: string): Promise<void> {
browser.storage.local.set({ [`logs-${key}`]: value })
},
}
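
The store now reads and writes through `browser.storage.local` instead of `window.localStorage`. As a quick orientation to that API's shape (the `readLog`/`appendLog` helpers below are purely illustrative, borrowing the `logs-` prefix from the diff):

```ts
import browser from "webextension-polyfill"

// browser.storage.local.get resolves to an object keyed by the requested
// keys; keys with no stored value are simply absent from the result.
async function readLog(level: string): Promise<string> {
  const key = `logs-${level}`
  const result = await browser.storage.local.get(key)
  return typeof result[key] === "string" ? result[key] : ""
}

// browser.storage.local.set takes an object of key/value pairs to persist.
async function appendLog(level: string, entry: string): Promise<void> {
  const key = `logs-${level}`
  const existing = await readLog(level)
  await browser.storage.local.set({ [key]: `${existing}\n${entry}` })
}
```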

@@ -170,6 +186,38 @@ class Logger {
this.logEvent(LogLevel.error, input)
}

/**
* Helper for a common pattern where `Promise.allSettled` is used to
* run multiple promises in parallel to resolve data, and some of the
* promises may fail and should be logged as such.
*
* If no promises fail, nothing is logged. If promises fail, the
* promise is logged alongside the corresponding entry in
* `perResultData`, if any. This allows for `perResultData` to include
* additional information about the failed promise. For example, if the
* promises are resolving information about a set of addresses, the
* perResultData might include the address corresponding to each promise, so
* that a failure to resolve information about an address can include that
* address alongside the failure for further debugging.
*/
errorLogRejectedPromises<T>(
logPrefix: string,
settledPromises: PromiseSettledResult<T>[],
perResultData: unknown[] = [],
): void {
const rejectedLogData = settledPromises.reduce<unknown[]>(
(logData, settledPromise, i) =>
settledPromise.status === "rejected"
? [...logData, settledPromise, ...perResultData.slice(i, i + 1)]
: logData,
[],
)

if (rejectedLogData.length > 0) {
this.error(logPrefix, rejectedLogData)
}
}

buildError(...input: unknown[]): Error {
this.error(...input)
return new Error(input.join(" "))
@@ -218,7 +266,7 @@ class Logger {
this.saveLog(level, isoDateString, logLabel, input, stackTrace)
}

private saveLog(
private async saveLog(
level: LogLevel,
isoDateString: string,
logLabel: string,
@@ -249,14 +297,13 @@
.split("\n")
.join("\n ")

const logKey = `logs-${level}`
const existingLogs = this.store.get(logKey)
const existingLogs = await this.store.get(level)

const fullPrefix = `[${isoDateString}] [${level.toUpperCase()}:${
this.contextId
}]`

// Note: we have to do everything from here to `storage.local.set`
// Note: we have to do everything from here to `store.set`
// synchronously, i.e. no promises, otherwise we risk losing logs between
// background and content/UI scripts.
const purgedData = purgeSensitiveFailSafe(logData)
@@ -266,20 +313,20 @@
// usage.
.slice(-50000)

this.store.set(logKey, updatedLogs)
await this.store.set(level, updatedLogs)
}

serializeLogs(): string {
async serializeLogs(): Promise<string> {
type StoredLogData = {
-readonly [level in Exclude<LogLevel, LogLevel.off>]: string
}

const logs: StoredLogData = {
debug: this.store.get("logs-debug"),
info: this.store.get("logs-info"),
warn: this.store.get("logs-warn"),
error: this.store.get("logs-error"),
}
const logs: StoredLogData = await this.store.getAll(
"debug",
"info",
"warn",
"error",
)

if (Object.values(logs).every((entry) => entry === "")) {
return "[NO LOGS FOUND]"
@@ -327,6 +374,32 @@

const logger = new Logger()

/**
* Takes the same parameters as `Logger.errorLogRejectedPromises` and logs any
* failed promises by calling that.
*
* This helper also unwraps all *fulfilled* promises and returns the results. That
* allows a caller who wants to log failures and then deal with successes in one
* step to call this function once with the `await Promise.allSettled` call as the
* second parameter, and assign the result directly to a variable that is used
* in downstream work.
*/
export function logRejectedAndReturnFulfilledResults<T>(
logPrefix: string,
settledPromises: PromiseSettledResult<T>[],
perResultData: unknown[] = [],
): T[] {
logger.errorLogRejectedPromises(logPrefix, settledPromises, perResultData)

return settledPromises.reduce<T[]>(
(fulfilledResults, settledPromise) =>
settledPromise.status === "fulfilled"
? [...fulfilledResults, settledPromise.value]
: fulfilledResults,
[],
)
}

export const serializeLogs = logger.serializeLogs.bind(logger)

export default logger
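
A hedged usage sketch of the new helper, following the doc comment's suggestion of passing addresses as `perResultData` so failures can be tied back to the address that caused them (the `lookupBalance` function and the import path are assumptions for illustration):

```ts
import { logRejectedAndReturnFulfilledResults } from "./logger"

// Hypothetical per-address resolver; stands in for any async lookup.
declare function lookupBalance(address: string): Promise<bigint>

async function balancesForAddresses(addresses: string[]): Promise<bigint[]> {
  const settled = await Promise.allSettled(
    addresses.map((address) => lookupBalance(address)),
  )

  // Rejected lookups are logged alongside the address that failed; fulfilled
  // values come back unwrapped, ready for downstream use.
  return logRejectedAndReturnFulfilledResults(
    "Failed to look up balances for some addresses",
    settled,
    addresses,
  )
}
```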
29 changes: 17 additions & 12 deletions background/lib/priceOracle.ts
@@ -31,7 +31,7 @@ import {
import { toFixedPoint } from "./fixed-point"
import SerialFallbackProvider from "../services/chain/serial-fallback-provider"
import { EVMNetwork } from "../networks"
import logger from "./logger"
import logger, { logRejectedAndReturnFulfilledResults } from "./logger"

// The size of a batch of on-chain price lookups. Too high and the request will
// fail due to running out of gas, as eth_call is still subject to gas limits.
@@ -221,24 +221,29 @@ export async function getUSDPriceForTokens(
[contractAddress: string]: UnitPricePoint<FungibleAsset>
}> {
if (assets.length > BATCH_SIZE) {
const batches = _.range(0, assets.length / BATCH_SIZE).map((batch) =>
assets.slice(batch * BATCH_SIZE, batch * BATCH_SIZE + BATCH_SIZE),
)
logger.debug(
"Batching assets price lookup by length",
assets.length,
BATCH_SIZE,
batches,
)
// Batch assets when we get to BATCH_SIZE so we're not trying to construct
// megatransactions with 10s and 100s of assets that blow up RPC nodes.
return (
await Promise.all(
_.range(0, assets.length / BATCH_SIZE)
.map((batch) =>
assets.slice(0 + batch * BATCH_SIZE, batch * BATCH_SIZE),
)
.map((subAssets) =>
getUSDPriceForTokens(subAssets, network, provider),
),
)
).reduce((allPrices, pricesSubset) => ({ ...allPrices, ...pricesSubset }))
return logRejectedAndReturnFulfilledResults(
"Some batch asset price lookups failed",
await Promise.allSettled(
batches.map((subAssets) =>
getUSDPriceForTokens(subAssets, network, provider),
),
),
batches,
).reduce(
(allPrices, pricesSubset) => ({ ...allPrices, ...pricesSubset }),
{},
)
}

const tokenRates = await getRatesForTokens(assets, provider, network)
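
The batching above splits a long asset list into `BATCH_SIZE` chunks, prices each chunk in parallel, logs any failed chunks, and merges the per-chunk results into one object. As a standalone sketch of that shape (lodash's `_.chunk` stands in for the `_.range` + `slice` construction in the diff, and failed batches are simply skipped here rather than routed through the logging helper):

```ts
import _ from "lodash"

const BATCH_SIZE = 10 // assumed value for the example

async function inBatches<T, R extends object>(
  items: T[],
  run: (batch: T[]) => Promise<R>,
): Promise<R> {
  const batches = _.chunk(items, BATCH_SIZE)
  const settled = await Promise.allSettled(batches.map(run))

  // Keep only fulfilled batch results and merge their key/value pairs.
  return settled
    .flatMap((result) => (result.status === "fulfilled" ? [result.value] : []))
    .reduce((merged, partial) => ({ ...merged, ...partial }), {} as R)
}
```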
18 changes: 0 additions & 18 deletions background/lib/tests/asset-similarity.unit.test.ts

This file was deleted.

26 changes: 21 additions & 5 deletions background/lib/token-lists.ts
@@ -6,6 +6,7 @@ import {
FungibleAsset,
SmartContractFungibleAsset,
TokenListAndReference,
TokenListCitation,
} from "../assets"
import { isValidUniswapTokenListResponse } from "./validate"
import { EVMNetwork } from "../networks"
@@ -146,12 +147,27 @@ export function mergeAssets<T extends FungibleAsset>(
metadata: {
...matchingAsset.metadata,
...asset.metadata,
tokenLists: Array.from(
new Set(
(matchingAsset.metadata?.tokenLists || [])?.concat(
asset.metadata?.tokenLists ?? [],
// Deduplicate token lists by first grouping by URL then
// choosing the first list with a given URL that also has a
// logoURL, or if no token list for that URL has a logoURL
// then the first token list that has that URL.
tokenLists: Object.values(
(matchingAsset.metadata?.tokenLists || [])
?.concat(asset.metadata?.tokenLists ?? [])
.reduce<{ [url: string]: TokenListCitation[] }>(
(citations, tokenListCitation) => ({
...citations,
[tokenListCitation.url]: [
...(citations[tokenListCitation.url] ?? []),
tokenListCitation,
],
}),
{},
),
).values(),
).flatMap(
(duplicateList) =>
duplicateList.find((list) => list.logoURL !== undefined) ??
duplicateList[0],
),
},
}
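
A standalone sketch of the deduplication rule introduced above: group citations by URL, then keep a single entry per URL, preferring one that carries a `logoURL` and falling back to the first otherwise. The `TokenListCitation` shape here is assumed; the real type lives in `background/assets`.

```ts
type TokenListCitation = { url: string; name?: string; logoURL?: string }

function dedupeTokenLists(citations: TokenListCitation[]): TokenListCitation[] {
  // Group all citations that share a URL.
  const byURL = new Map<string, TokenListCitation[]>()
  citations.forEach((citation) => {
    byURL.set(citation.url, [...(byURL.get(citation.url) ?? []), citation])
  })

  // One entry per URL: prefer a citation that has a logoURL.
  return Array.from(byURL.values()).map(
    (duplicates) =>
      duplicates.find((citation) => citation.logoURL !== undefined) ??
      duplicates[0],
  )
}
```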
4 changes: 2 additions & 2 deletions background/redux-slices/accounts.ts
@@ -420,7 +420,7 @@ const accountSlice = createSlice({
/**
* Updates cached SmartContracts metadata
*/
updateAssetReferences: (
updateAccountAssetReferences: (
immerState,
{ payload: assets }: { payload: SmartContractFungibleAsset[] },
) => {
Expand Down Expand Up @@ -492,7 +492,7 @@ export const {
updateAccountBalance,
updateAccountName,
updateENSAvatar,
updateAssetReferences,
updateAccountAssetReferences,
removeAssetReferences,
removeChainBalances,
} = accountSlice.actions