From 45b5bd5a84acfbe397e8d49c1a4678da90c3231d Mon Sep 17 00:00:00 2001 From: antoncoding Date: Sat, 14 Mar 2026 01:00:13 +0800 Subject: [PATCH 1/5] feat: envio (monarch api) data source --- .env.local.example | 8 + AGENTS.md | 6 + src/config/dataSources.ts | 29 ++ src/data-sources/envio/events.ts | 268 ++++++++++++ src/data-sources/envio/fetchers.ts | 68 +++ src/data-sources/envio/historical.ts | 324 ++++++++++++++ src/data-sources/envio/market-activity.ts | 146 +++++++ src/data-sources/envio/market-participants.ts | 170 ++++++++ src/data-sources/envio/market.ts | 345 +++++++++++++++ src/data-sources/envio/positions.ts | 159 +++++++ src/data-sources/envio/transactions.ts | 258 +++++++++++ src/data-sources/envio/utils.ts | 45 ++ src/data-sources/market-activity.ts | 91 ++++ src/data-sources/market-catalog.ts | 127 ++++++ src/data-sources/market-details.ts | 106 +++++ src/data-sources/market-historical.ts | 46 ++ src/data-sources/market-participants.ts | 60 +++ src/data-sources/morpho-api/market.ts | 23 +- src/data-sources/position-markets.ts | 60 +++ .../shared/historical-chain-context.ts | 114 +++++ src/data-sources/shared/market-merge.ts | 50 +++ .../shared/market-rate-enrichment.ts | 412 ++++++++++++++++++ src/data-sources/shared/market-usd.ts | 157 +++++++ src/data-sources/shared/market-visibility.ts | 29 ++ src/data-sources/shared/source-debug.ts | 42 ++ src/data-sources/shared/token-metadata.ts | 198 +++++++++ src/data-sources/subgraph/market.ts | 39 +- src/data-sources/user-position.ts | 38 ++ .../components/charts/volume-chart.tsx | 24 +- .../components/table/market-row-detail.tsx | 2 +- .../components/table/market-table-body.tsx | 6 +- .../ui-lab/fixtures/market-fixtures.ts | 1 + src/graphql/envio-queries.ts | 218 +++++++++ src/hooks/queries/fetchUserTransactions.ts | 20 +- src/hooks/queries/useMarketMetricsQuery.ts | 1 - src/hooks/queries/useMarketsQuery.ts | 65 +-- src/hooks/queries/useTokensQuery.ts | 90 +--- src/hooks/useAllMarketPositions.ts | 42 +- 
src/hooks/useMarketBorrowers.ts | 29 +- src/hooks/useMarketBorrows.ts | 29 +- src/hooks/useMarketData.ts | 58 +-- src/hooks/useMarketHistoricalData.ts | 42 +- src/hooks/useMarketLiquidations.ts | 30 +- src/hooks/useMarketSuppliers.ts | 29 +- src/hooks/useMarketSupplies.ts | 29 +- src/hooks/useProcessedMarkets.ts | 107 +---- src/hooks/useTokenPrices.ts | 30 ++ src/hooks/useUserPosition.ts | 58 +-- src/hooks/useUserPositions.ts | 79 +--- src/utils/marketIdentity.ts | 3 + src/utils/rpc.ts | 39 +- src/utils/tokenCatalog.ts | 129 ++++++ src/utils/tokens.ts | 6 +- src/utils/types.ts | 3 + 54 files changed, 3914 insertions(+), 673 deletions(-) create mode 100644 src/data-sources/envio/events.ts create mode 100644 src/data-sources/envio/fetchers.ts create mode 100644 src/data-sources/envio/historical.ts create mode 100644 src/data-sources/envio/market-activity.ts create mode 100644 src/data-sources/envio/market-participants.ts create mode 100644 src/data-sources/envio/market.ts create mode 100644 src/data-sources/envio/positions.ts create mode 100644 src/data-sources/envio/transactions.ts create mode 100644 src/data-sources/envio/utils.ts create mode 100644 src/data-sources/market-activity.ts create mode 100644 src/data-sources/market-catalog.ts create mode 100644 src/data-sources/market-details.ts create mode 100644 src/data-sources/market-historical.ts create mode 100644 src/data-sources/market-participants.ts create mode 100644 src/data-sources/position-markets.ts create mode 100644 src/data-sources/shared/historical-chain-context.ts create mode 100644 src/data-sources/shared/market-merge.ts create mode 100644 src/data-sources/shared/market-rate-enrichment.ts create mode 100644 src/data-sources/shared/market-usd.ts create mode 100644 src/data-sources/shared/market-visibility.ts create mode 100644 src/data-sources/shared/source-debug.ts create mode 100644 src/data-sources/shared/token-metadata.ts create mode 100644 src/data-sources/user-position.ts create mode 100644 
src/graphql/envio-queries.ts create mode 100644 src/utils/marketIdentity.ts create mode 100644 src/utils/tokenCatalog.ts diff --git a/.env.local.example b/.env.local.example index 45f3da81..0fddd083 100644 --- a/.env.local.example +++ b/.env.local.example @@ -48,6 +48,14 @@ NEXT_PUBLIC_MONAD_RPC= NEXT_PUBLIC_THEGRAPH_API_KEY= +# ==================== Envio Indexer ==================== +# Public browser-facing Envio / HyperIndex GraphQL endpoint +NEXT_PUBLIC_ENVIO_INDEXER_ENDPOINT= +# Optional lightweight API key for the Envio endpoint +NEXT_PUBLIC_ENVIO_INDEXER_API_KEY= +# Optional: set to "true" to log source selection / fallback reasons in the browser console +NEXT_PUBLIC_DEBUG_DATA_SOURCES= + # Used for balance API ALCHEMY_API_KEY= diff --git a/AGENTS.md b/AGENTS.md index 34c07b5c..ef30427a 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -165,6 +165,12 @@ When touching transaction and position flows, validation MUST include all releva 29. **Preview prop integrity**: any position/risk preview component that separates current and projected props must receive quote- or input-derived projected balances through dedicated `projected*` props while preserving live balances in `current*` props, so amount rows, LTV deltas, and liquidation metrics stay synchronized instead of mixing current and projected states. 30. **Fee preview consistency**: transaction previews that show protocol/app fees must derive token/USD display from shared fee-display helpers, use compact token amounts with explicit full-value hover content, threshold tiny USD values as `< $0.01` while preserving exact USD on hover, and avoid ad hoc per-modal formatting drift. 31. 
**Market list first-paint integrity**: shared multi-chain market list queries must not let a single slow chain or slow fallback path block first paint indefinitely; network fetches should use bounded request timeouts that account for fallback coverage, and any fallback path used for first paint must preserve market completeness (no truncated `first: 1000` fallback). Once page 1 reveals total count, remaining pagination should be fetched in parallel or bounded parallel batches instead of strict sequential loops. +32. **Primary/fallback source integrity**: when a shared market or position adapter has a configured primary indexer, it must not issue legacy-source requests in parallel “just in case.” Legacy APIs are fallback-only, fallback decisions must be logged with an explicit reason, and token/market blacklist gates must be applied before primary results reach shared catalogs or detail loaders. +33. **Token metadata batching integrity**: market and position flows that need token symbol/name/decimals or token-icon discovery must resolve them through shared batched token-catalog and metadata chokepoints. Do not issue per-market ERC20 metadata RPCs inside loops when the same addresses can be deduped and multicall-batched per chain. +34. **Historical enrichment RPC integrity**: shared market-list/detail/history enrichment must use the active custom RPC configuration for all historical block/state reads, and bounded timeouts must preserve partial per-chain results instead of wrapping the entire multi-chain enrichment job in one timeout that blanks every market. +35. **USD price provenance integrity**: market-level USD display state must distinguish direct fetched token/API prices, peg-based hardcoded fallback estimates, and missing prices. Tooltip and trust gates must key off that explicit provenance, not a generic “has USD” boolean that conflates fallback and absence. +36. 
**RPC dedupe and batching integrity**: shared RPC chokepoints must cache per-endpoint viem clients, enable transport-level JSON-RPC batching for parallel reads, and dedupe in-flight chain-wide historical/token-metadata jobs so multiple consumers cannot multiply identical RPC bursts. +37. **Historical chart unit integrity**: shared historical adapters must keep each series on one explicit unit contract across all sources. Asset-volume series must not mix raw smallest-unit values and display-unit decimals between sources, and chart consumers must tolerate stale cached points during contract transitions. ### REQUIRED: Regression Rule Capture diff --git a/src/config/dataSources.ts b/src/config/dataSources.ts index f45fb93b..07566ca6 100644 --- a/src/config/dataSources.ts +++ b/src/config/dataSources.ts @@ -1,5 +1,10 @@ import { SupportedNetworks } from '@/utils/networks'; +type EnvioIndexerConfig = { + endpoint: string; + apiKey?: string; +}; + /** * Check if a network supports Morpho API as a data source */ @@ -18,3 +23,27 @@ export const supportsMorphoApi = (network: SupportedNetworks): boolean => { return false; } }; + +const getTrimmedEnv = (value: string | undefined): string | undefined => { + const trimmed = value?.trim(); + return trimmed ? 
trimmed : undefined; +}; + +export const getEnvioIndexerConfig = (): EnvioIndexerConfig | null => { + const endpoint = getTrimmedEnv(process.env.NEXT_PUBLIC_ENVIO_INDEXER_ENDPOINT); + + if (!endpoint) { + return null; + } + + const apiKey = getTrimmedEnv(process.env.NEXT_PUBLIC_ENVIO_INDEXER_API_KEY); + + return { + endpoint, + apiKey, + }; +}; + +export const hasEnvioIndexer = (): boolean => { + return getEnvioIndexerConfig() !== null; +}; diff --git a/src/data-sources/envio/events.ts b/src/data-sources/envio/events.ts new file mode 100644 index 00000000..97b97060 --- /dev/null +++ b/src/data-sources/envio/events.ts @@ -0,0 +1,268 @@ +import { + envioBorrowEventsQuery, + envioBorrowRateUpdatesQuery, + envioLiquidationsQuery, + envioRepayEventsQuery, + envioSupplyCollateralEventsQuery, + envioSupplyEventsQuery, + envioWithdrawCollateralEventsQuery, + envioWithdrawEventsQuery, +} from '@/graphql/envio-queries'; +import type { SupportedNetworks } from '@/utils/networks'; +import { envioGraphqlFetcher } from './fetchers'; +import { fetchAllEnvioPages } from './utils'; + +const ENVIO_EVENTS_PAGE_SIZE = 500; +const ENVIO_EVENTS_MAX_ITEMS = 1000; +const ENVIO_EVENTS_TIMEOUT_MS = 15_000; + +export type EnvioLoanEventRow = { + assets: string | number; + chainId: number; + market_id: string; + onBehalf: string; + shares?: string | number; + timestamp: string | number; + txHash: string; +}; + +export type EnvioWithdrawEventRow = EnvioLoanEventRow & { + receiver?: string; +}; + +export type EnvioLiquidationEventRow = { + badDebtAssets: string | number; + borrower: string; + caller: string; + chainId: number; + market_id: string; + repaidAssets: string | number; + repaidShares?: string | number; + seizedAssets: string | number; + timestamp: string | number; + txHash: string; +}; + +export type EnvioBorrowRateUpdateRow = { + avgBorrowRate: string | number; + chainId: number; + market_id: string; + rateAtTarget: string | number; + timestamp: string | number; + txHash: string; 
+}; + +type EnvioLoanEventsResponse = { + data?: { + Morpho_Borrow?: EnvioLoanEventRow[]; + Morpho_Repay?: EnvioLoanEventRow[]; + Morpho_Supply?: EnvioLoanEventRow[]; + Morpho_SupplyCollateral?: EnvioLoanEventRow[]; + Morpho_Withdraw?: EnvioWithdrawEventRow[]; + Morpho_WithdrawCollateral?: EnvioWithdrawEventRow[]; + }; +}; + +type EnvioLiquidationsResponse = { + data?: { + Morpho_Liquidate?: EnvioLiquidationEventRow[]; + }; +}; + +type EnvioBorrowRateUpdatesResponse = { + data?: { + AdaptiveCurveIrm_BorrowRateUpdate?: EnvioBorrowRateUpdateRow[]; + }; +}; + +const fetchEnvioLoanEvents = async >({ + field, + limit, + offset, + query, + where, +}: { + field: T; + limit: number; + offset: number; + query: string; + where: Record; +}): Promise[T]>> => { + const response = await envioGraphqlFetcher( + query, + { + limit, + offset, + where, + }, + { + timeoutMs: ENVIO_EVENTS_TIMEOUT_MS, + }, + ); + + return (response.data?.[field] ?? []) as NonNullable[T]>; +}; + +const fetchEnvioLiquidationEventsPage = async (limit: number, offset: number, where: Record) => { + const response = await envioGraphqlFetcher( + envioLiquidationsQuery, + { + limit, + offset, + where, + }, + { + timeoutMs: ENVIO_EVENTS_TIMEOUT_MS, + }, + ); + + return response.data?.Morpho_Liquidate ?? []; +}; + +const fetchEnvioBorrowRateUpdatesPage = async (limit: number, offset: number, where: Record) => { + const response = await envioGraphqlFetcher( + envioBorrowRateUpdatesQuery, + { + limit, + offset, + where, + }, + { + timeoutMs: ENVIO_EVENTS_TIMEOUT_MS, + }, + ); + + return response.data?.AdaptiveCurveIrm_BorrowRateUpdate ?? 
[]; +}; + +export const fetchEnvioSupplyRows = async (where: Record): Promise => { + return fetchAllEnvioPages({ + fetchPage: async (limit, offset) => + fetchEnvioLoanEvents({ + field: 'Morpho_Supply', + limit, + offset, + query: envioSupplyEventsQuery, + where, + }), + maxItems: ENVIO_EVENTS_MAX_ITEMS, + pageSize: ENVIO_EVENTS_PAGE_SIZE, + }); +}; + +export const fetchEnvioWithdrawRows = async (where: Record): Promise => { + return fetchAllEnvioPages({ + fetchPage: async (limit, offset) => + fetchEnvioLoanEvents({ + field: 'Morpho_Withdraw', + limit, + offset, + query: envioWithdrawEventsQuery, + where, + }), + maxItems: ENVIO_EVENTS_MAX_ITEMS, + pageSize: ENVIO_EVENTS_PAGE_SIZE, + }); +}; + +export const fetchEnvioBorrowRows = async (where: Record): Promise => { + return fetchAllEnvioPages({ + fetchPage: async (limit, offset) => + fetchEnvioLoanEvents({ + field: 'Morpho_Borrow', + limit, + offset, + query: envioBorrowEventsQuery, + where, + }), + maxItems: ENVIO_EVENTS_MAX_ITEMS, + pageSize: ENVIO_EVENTS_PAGE_SIZE, + }); +}; + +export const fetchEnvioRepayRows = async (where: Record): Promise => { + return fetchAllEnvioPages({ + fetchPage: async (limit, offset) => + fetchEnvioLoanEvents({ + field: 'Morpho_Repay', + limit, + offset, + query: envioRepayEventsQuery, + where, + }), + maxItems: ENVIO_EVENTS_MAX_ITEMS, + pageSize: ENVIO_EVENTS_PAGE_SIZE, + }); +}; + +export const fetchEnvioSupplyCollateralRows = async (where: Record): Promise => { + return fetchAllEnvioPages({ + fetchPage: async (limit, offset) => + fetchEnvioLoanEvents({ + field: 'Morpho_SupplyCollateral', + limit, + offset, + query: envioSupplyCollateralEventsQuery, + where, + }), + maxItems: ENVIO_EVENTS_MAX_ITEMS, + pageSize: ENVIO_EVENTS_PAGE_SIZE, + }); +}; + +export const fetchEnvioWithdrawCollateralRows = async (where: Record): Promise => { + return fetchAllEnvioPages({ + fetchPage: async (limit, offset) => + fetchEnvioLoanEvents({ + field: 'Morpho_WithdrawCollateral', + limit, + offset, + 
query: envioWithdrawCollateralEventsQuery, + where, + }), + maxItems: ENVIO_EVENTS_MAX_ITEMS, + pageSize: ENVIO_EVENTS_PAGE_SIZE, + }); +}; + +export const fetchEnvioLiquidationRows = async (where: Record): Promise => { + return fetchAllEnvioPages({ + fetchPage: async (limit, offset) => fetchEnvioLiquidationEventsPage(limit, offset, where), + maxItems: ENVIO_EVENTS_MAX_ITEMS, + pageSize: ENVIO_EVENTS_PAGE_SIZE, + }); +}; + +export const fetchEnvioBorrowRateUpdates = async ({ + chainId, + marketId, + timestampGte, + timestampLte, +}: { + chainId: SupportedNetworks; + marketId: string; + timestampGte?: number; + timestampLte?: number; +}): Promise => { + const where: Record = { + chainId: { + _eq: chainId, + }, + market_id: { + _eq: marketId.toLowerCase(), + }, + }; + + if (timestampGte != null || timestampLte != null) { + where.timestamp = { + ...(timestampGte != null ? { _gte: timestampGte } : {}), + ...(timestampLte != null ? { _lte: timestampLte } : {}), + }; + } + + return fetchAllEnvioPages({ + fetchPage: async (limit, offset) => fetchEnvioBorrowRateUpdatesPage(limit, offset, where), + maxItems: ENVIO_EVENTS_MAX_ITEMS, + pageSize: ENVIO_EVENTS_PAGE_SIZE, + }); +}; diff --git a/src/data-sources/envio/fetchers.ts b/src/data-sources/envio/fetchers.ts new file mode 100644 index 00000000..e64afbc9 --- /dev/null +++ b/src/data-sources/envio/fetchers.ts @@ -0,0 +1,68 @@ +import { getEnvioIndexerConfig } from '@/config/dataSources'; + +type EnvioFetcherOptions = { + timeoutMs?: number; +}; + +export const envioGraphqlFetcher = async >( + query: string, + variables: Record, + options: EnvioFetcherOptions = {}, +): Promise => { + const config = getEnvioIndexerConfig(); + + if (!config) { + throw new Error('Envio indexer endpoint is not configured'); + } + + const { timeoutMs } = options; + const abortController = timeoutMs ? new AbortController() : undefined; + const timeoutId = timeoutMs + ? 
globalThis.setTimeout(() => { + abortController?.abort(); + }, timeoutMs) + : null; + + try { + const headers: HeadersInit = { + 'Content-Type': 'application/json', + }; + + if (config.apiKey) { + headers.Authorization = `Bearer ${config.apiKey}`; + headers['x-api-key'] = config.apiKey; + } + + const response = await fetch(config.endpoint, { + method: 'POST', + headers, + body: JSON.stringify({ query, variables }), + cache: 'no-store', + signal: abortController?.signal, + }); + + if (!response.ok) { + throw new Error(`Network response was not ok from Envio indexer: ${response.status} ${response.statusText}`); + } + + const result = (await response.json()) as T; + + const errors = 'errors' in result ? (result as { errors?: unknown[] }).errors : undefined; + + if (Array.isArray(errors) && errors.length > 0) { + throw new Error(`Envio indexer GraphQL error: ${JSON.stringify(errors)}`); + } + + return result; + } catch (error) { + if (error instanceof Error && error.name === 'AbortError' && timeoutMs) { + throw new Error(`Envio indexer request timed out after ${timeoutMs}ms`); + } + + throw error; + } finally { + if (timeoutId) { + globalThis.clearTimeout(timeoutId); + } + } +}; diff --git a/src/data-sources/envio/historical.ts b/src/data-sources/envio/historical.ts new file mode 100644 index 00000000..3bddd023 --- /dev/null +++ b/src/data-sources/envio/historical.ts @@ -0,0 +1,324 @@ +import { AdaptiveCurveIrmLib, Market as BlueMarket, MarketParams as BlueMarketParams, MarketUtils } from '@morpho-org/blue-sdk'; +import { type Address, formatUnits } from 'viem'; +import morphoAbi from '@/abis/morpho'; +import { fetchMarketDetails } from '@/data-sources/market-details'; +import type { HistoricalDataSuccessResult } from '@/data-sources/morpho-api/historical'; +import { fetchEnvioBorrowRateUpdates } from '@/data-sources/envio/events'; +import type { CustomRpcUrls } from '@/stores/useCustomRpc'; +import type { BlockWithTimestamp } from '@/utils/blockEstimation'; +import { 
getMorphoAddress } from '@/utils/morpho'; +import type { SupportedNetworks } from '@/utils/networks'; +import { getClient } from '@/utils/rpc'; +import type { MarketRates, MarketVolumes, TimeseriesDataPoint, TimeseriesOptions } from '@/utils/types'; +import { fetchHistoricalChainContext } from '../shared/historical-chain-context'; +import { normalizeEnvioString, normalizeEnvioTimestamp } from './utils'; + +const INTERVAL_SECONDS: Record = { + DAY: 24 * 60 * 60, + HOUR: 60 * 60, + MONTH: 30 * 24 * 60 * 60, + WEEK: 7 * 24 * 60 * 60, +}; +const HISTORICAL_STATE_BATCH_SIZE = 24; +const HISTORICAL_TIMEOUT_MS = 12_000; + +type HistoricalMarketState = { + fee: bigint; + lastUpdate: bigint; + totalBorrowAssets: bigint; + totalBorrowShares: bigint; + totalSupplyAssets: bigint; + totalSupplyShares: bigint; +}; + +const withTimeout = async (promise: Promise, timeoutMs: number, fallbackValue: T): Promise => { + let timeoutHandle: ReturnType | null = null; + + const timeoutPromise = new Promise((resolve) => { + timeoutHandle = globalThis.setTimeout(() => resolve(fallbackValue), timeoutMs); + }); + + try { + return await Promise.race([promise, timeoutPromise]); + } finally { + if (timeoutHandle) { + globalThis.clearTimeout(timeoutHandle); + } + } +}; + +const toNumberValue = (value: bigint, decimals: number): number => { + const formattedValue = formatUnits(value, decimals); + const parsedValue = Number(formattedValue); + return Number.isFinite(parsedValue) ? 
parsedValue : 0; +}; + +const tryParseBigInt = (value: string): bigint | null => { + try { + return BigInt(value); + } catch { + return null; + } +}; + +const buildTimestamps = ({ endTimestamp, interval, startTimestamp }: TimeseriesOptions): number[] => { + const stepSeconds = INTERVAL_SECONDS[interval]; + const timestamps: number[] = []; + + for (let timestamp = startTimestamp; timestamp <= endTimestamp; timestamp += stepSeconds) { + timestamps.push(timestamp); + } + + if (timestamps.at(-1) !== endTimestamp) { + timestamps.push(endTimestamp); + } + + return timestamps; +}; + +const parseHistoricalMarketState = (value: unknown): HistoricalMarketState | null => { + if (!Array.isArray(value) || value.length < 6 || !value.every((entry) => typeof entry === 'bigint')) { + return null; + } + + return { + fee: value[5] as bigint, + lastUpdate: value[4] as bigint, + totalBorrowAssets: value[2] as bigint, + totalBorrowShares: value[3] as bigint, + totalSupplyAssets: value[0] as bigint, + totalSupplyShares: value[1] as bigint, + }; +}; + +const normalizeRateAtTarget = (value: string): bigint => { + try { + const parsedValue = BigInt(value); + return parsedValue > 0n ? parsedValue : AdaptiveCurveIrmLib.INITIAL_RATE_AT_TARGET; + } catch { + return AdaptiveCurveIrmLib.INITIAL_RATE_AT_TARGET; + } +}; + +const deriveLoanAssetPrice = (market: NonNullable>>): number => { + const parsedSupplyAssets = tryParseBigInt(market.state.supplyAssets); + const supplyAssets = parsedSupplyAssets ? toNumberValue(parsedSupplyAssets, market.loanAsset.decimals) : 0; + if (supplyAssets > 0 && market.state.supplyAssetsUsd > 0) { + return market.state.supplyAssetsUsd / supplyAssets; + } + + const parsedBorrowAssets = tryParseBigInt(market.state.borrowAssets); + const borrowAssets = parsedBorrowAssets ? 
toNumberValue(parsedBorrowAssets, market.loanAsset.decimals) : 0; + if (borrowAssets > 0 && market.state.borrowAssetsUsd > 0) { + return market.state.borrowAssetsUsd / borrowAssets; + } + + return 0; +}; + +const buildEmptyResult = (): HistoricalDataSuccessResult => ({ + rates: { + apyAtTarget: [], + borrowApy: [], + supplyApy: [], + utilization: [], + }, + volumes: { + borrowAssets: [], + borrowAssetsUsd: [], + liquidityAssets: [], + liquidityAssetsUsd: [], + supplyAssets: [], + supplyAssetsUsd: [], + }, +}); + +const addPoint = (series: TimeseriesDataPoint[], x: number, y: number) => { + if (Number.isFinite(y)) { + series.push({ x, y }); + } +}; + +const fetchHistoricalStates = async ({ + blocks, + chainId, + marketId, + customRpcUrl, +}: { + blocks: BlockWithTimestamp[]; + chainId: SupportedNetworks; + marketId: string; + customRpcUrl?: string; +}): Promise<{ state: HistoricalMarketState; timestamp: number }[]> => { + const client = getClient(chainId, customRpcUrl); + const morphoAddress = getMorphoAddress(chainId) as Address; + const historicalStates: { state: HistoricalMarketState; timestamp: number }[] = []; + + for (let index = 0; index < blocks.length; index += HISTORICAL_STATE_BATCH_SIZE) { + const blockBatch = blocks.slice(index, index + HISTORICAL_STATE_BATCH_SIZE); + const batchResults = await Promise.allSettled( + blockBatch.map(async (block) => + withTimeout( + client.readContract({ + abi: morphoAbi, + address: morphoAddress, + args: [marketId as `0x${string}`], + blockNumber: BigInt(block.blockNumber), + functionName: 'market' as const, + }), + HISTORICAL_TIMEOUT_MS, + null, + ), + ), + ); + + for (const [batchIndex, result] of batchResults.entries()) { + if (result.status !== 'fulfilled' || result.value == null) { + continue; + } + + const parsedState = parseHistoricalMarketState(result.value); + if (!parsedState) { + continue; + } + + historicalStates.push({ + state: parsedState, + timestamp: blockBatch[batchIndex]!.timestamp, + }); + } + } + + 
return historicalStates.sort((left, right) => left.timestamp - right.timestamp); +}; + +const buildHistoricalResult = ({ + historicalStates, + loanAssetDecimals, + loanAssetPrice, + market, + rateUpdates, +}: { + historicalStates: { state: HistoricalMarketState; timestamp: number }[]; + loanAssetDecimals: number; + loanAssetPrice: number; + market: NonNullable>>; + rateUpdates: Awaited>; +}): HistoricalDataSuccessResult => { + const result = buildEmptyResult(); + const sortedUpdates = [...rateUpdates].sort((left, right) => normalizeEnvioTimestamp(left.timestamp) - normalizeEnvioTimestamp(right.timestamp)); + let rateAtTarget = normalizeRateAtTarget(market.state.rateAtTarget); + let updateIndex = 0; + + for (const historicalPoint of historicalStates) { + while (updateIndex < sortedUpdates.length && normalizeEnvioTimestamp(sortedUpdates[updateIndex]!.timestamp) <= historicalPoint.timestamp) { + rateAtTarget = normalizeRateAtTarget(normalizeEnvioString(sortedUpdates[updateIndex]!.rateAtTarget)); + updateIndex += 1; + } + + const historicalMarket = new BlueMarket({ + fee: historicalPoint.state.fee, + lastUpdate: historicalPoint.state.lastUpdate, + params: new BlueMarketParams({ + collateralToken: market.collateralAsset.address as Address, + irm: market.irmAddress as Address, + lltv: BigInt(market.lltv), + loanToken: market.loanAsset.address as Address, + oracle: market.oracleAddress as Address, + }), + rateAtTarget, + totalBorrowAssets: historicalPoint.state.totalBorrowAssets, + totalBorrowShares: historicalPoint.state.totalBorrowShares, + totalSupplyAssets: historicalPoint.state.totalSupplyAssets, + totalSupplyShares: historicalPoint.state.totalSupplyShares, + }); + + const supplyAssetsRaw = Number(historicalPoint.state.totalSupplyAssets); + const borrowAssetsRaw = Number(historicalPoint.state.totalBorrowAssets); + const liquidityAssetsRaw = Number( + historicalPoint.state.totalSupplyAssets > historicalPoint.state.totalBorrowAssets + ? 
historicalPoint.state.totalSupplyAssets - historicalPoint.state.totalBorrowAssets + : 0n, + ); + + const supplyAssets = toNumberValue(historicalPoint.state.totalSupplyAssets, loanAssetDecimals); + const borrowAssets = toNumberValue(historicalPoint.state.totalBorrowAssets, loanAssetDecimals); + const liquidityAssets = toNumberValue( + historicalPoint.state.totalSupplyAssets > historicalPoint.state.totalBorrowAssets + ? historicalPoint.state.totalSupplyAssets - historicalPoint.state.totalBorrowAssets + : 0n, + loanAssetDecimals, + ); + + addPoint(result.rates.supplyApy, historicalPoint.timestamp, historicalMarket.supplyApy); + addPoint(result.rates.borrowApy, historicalPoint.timestamp, historicalMarket.borrowApy); + addPoint(result.rates.apyAtTarget, historicalPoint.timestamp, historicalMarket.apyAtTarget ?? MarketUtils.rateToApy(rateAtTarget)); + addPoint(result.rates.utilization, historicalPoint.timestamp, Number(historicalMarket.utilization) / 1e18); + addPoint(result.volumes.supplyAssets, historicalPoint.timestamp, supplyAssetsRaw); + addPoint(result.volumes.borrowAssets, historicalPoint.timestamp, borrowAssetsRaw); + addPoint(result.volumes.liquidityAssets, historicalPoint.timestamp, liquidityAssetsRaw); + addPoint(result.volumes.supplyAssetsUsd, historicalPoint.timestamp, supplyAssets * loanAssetPrice); + addPoint(result.volumes.borrowAssetsUsd, historicalPoint.timestamp, borrowAssets * loanAssetPrice); + addPoint(result.volumes.liquidityAssetsUsd, historicalPoint.timestamp, liquidityAssets * loanAssetPrice); + } + + return result; +}; + +export const fetchEnvioMarketHistoricalData = async ( + marketId: string, + network: SupportedNetworks, + options: TimeseriesOptions, + requestOptions: { + customRpcUrls?: CustomRpcUrls; + } = {}, +): Promise => { + const customRpcUrl = requestOptions.customRpcUrls?.[network]; + const market = await fetchMarketDetails(marketId, network, { + customRpcUrls: requestOptions.customRpcUrls, + enrichHistoricalApys: false, + }); + + 
if (!market) { + return null; + } + + const client = getClient(network, customRpcUrl); + const chainContext = await fetchHistoricalChainContext({ + chainId: network, + client, + targetTimestamps: buildTimestamps(options), + timeoutMs: HISTORICAL_TIMEOUT_MS, + }); + + if (!chainContext || chainContext.historicalBlocks.length === 0) { + return null; + } + + const [historicalStates, rateUpdates] = await Promise.all([ + fetchHistoricalStates({ + blocks: chainContext.historicalBlocks, + chainId: network, + customRpcUrl, + marketId, + }), + fetchEnvioBorrowRateUpdates({ + chainId: network, + marketId, + timestampGte: options.startTimestamp, + timestampLte: options.endTimestamp, + }).catch(() => []), + ]); + + if (historicalStates.length === 0) { + return null; + } + + return buildHistoricalResult({ + historicalStates, + loanAssetDecimals: market.loanAsset.decimals, + loanAssetPrice: deriveLoanAssetPrice(market), + market, + rateUpdates, + }); +}; diff --git a/src/data-sources/envio/market-activity.ts b/src/data-sources/envio/market-activity.ts new file mode 100644 index 00000000..64e3dde0 --- /dev/null +++ b/src/data-sources/envio/market-activity.ts @@ -0,0 +1,146 @@ +import type { SupportedNetworks } from '@/utils/networks'; +import type { MarketActivityTransaction, MarketLiquidationTransaction, PaginatedMarketActivityTransactions } from '@/utils/types'; +import { + fetchEnvioBorrowRows, + fetchEnvioLiquidationRows, + fetchEnvioRepayRows, + fetchEnvioSupplyRows, + fetchEnvioWithdrawRows, +} from './events'; +import { normalizeEnvioString, normalizeEnvioTimestamp } from './utils'; + +const EVENT_FETCH_MINIMUM = '0'; + +const sortByTimestampDescending = (items: T[]): T[] => { + return items.sort((left, right) => right.timestamp - left.timestamp); +}; + +const buildMarketEventWhere = ({ + chainId, + marketId, + minAssets = EVENT_FETCH_MINIMUM, +}: { + chainId: SupportedNetworks; + marketId: string; + minAssets?: string; +}) => ({ + assets: { + _gte: minAssets, + }, + 
chainId: { + _eq: chainId, + }, + market_id: { + _eq: marketId.toLowerCase(), + }, +}); + +const mapSupplyActivity = (events: Awaited>): MarketActivityTransaction[] => { + return events.map((event) => ({ + amount: normalizeEnvioString(event.assets), + hash: event.txHash, + timestamp: normalizeEnvioTimestamp(event.timestamp), + type: 'MarketSupply', + userAddress: event.onBehalf, + })); +}; + +const mapWithdrawActivity = (events: Awaited>): MarketActivityTransaction[] => { + return events.map((event) => ({ + amount: normalizeEnvioString(event.assets), + hash: event.txHash, + timestamp: normalizeEnvioTimestamp(event.timestamp), + type: 'MarketWithdraw', + userAddress: event.onBehalf, + })); +}; + +const mapBorrowActivity = (events: Awaited>): MarketActivityTransaction[] => { + return events.map((event) => ({ + amount: normalizeEnvioString(event.assets), + hash: event.txHash, + timestamp: normalizeEnvioTimestamp(event.timestamp), + type: 'MarketBorrow', + userAddress: event.onBehalf, + })); +}; + +const mapRepayActivity = (events: Awaited>): MarketActivityTransaction[] => { + return events.map((event) => ({ + amount: normalizeEnvioString(event.assets), + hash: event.txHash, + timestamp: normalizeEnvioTimestamp(event.timestamp), + type: 'MarketRepay', + userAddress: event.onBehalf, + })); +}; + +export const fetchEnvioMarketSupplies = async ( + marketId: string, + chainId: SupportedNetworks, + minAssets = EVENT_FETCH_MINIMUM, + first = 8, + skip = 0, +): Promise => { + const where = buildMarketEventWhere({ + chainId, + marketId, + minAssets, + }); + + const [supplyEvents, withdrawEvents] = await Promise.all([fetchEnvioSupplyRows(where), fetchEnvioWithdrawRows(where)]); + const items = sortByTimestampDescending([...mapSupplyActivity(supplyEvents), ...mapWithdrawActivity(withdrawEvents)]); + + return { + items: items.slice(skip, skip + first), + totalCount: items.length, + }; +}; + +export const fetchEnvioMarketBorrows = async ( + marketId: string, + chainId: 
SupportedNetworks, + minAssets = EVENT_FETCH_MINIMUM, + first = 8, + skip = 0, +): Promise => { + const where = buildMarketEventWhere({ + chainId, + marketId, + minAssets, + }); + + const [borrowEvents, repayEvents] = await Promise.all([fetchEnvioBorrowRows(where), fetchEnvioRepayRows(where)]); + const items = sortByTimestampDescending([...mapBorrowActivity(borrowEvents), ...mapRepayActivity(repayEvents)]); + + return { + items: items.slice(skip, skip + first), + totalCount: items.length, + }; +}; + +export const fetchEnvioMarketLiquidations = async ( + marketId: string, + chainId: SupportedNetworks, +): Promise => { + const liquidations = await fetchEnvioLiquidationRows({ + chainId: { + _eq: chainId, + }, + market_id: { + _eq: marketId.toLowerCase(), + }, + }); + + return sortByTimestampDescending( + liquidations.map((liquidation) => ({ + badDebtAssets: normalizeEnvioString(liquidation.badDebtAssets), + hash: liquidation.txHash, + liquidator: liquidation.caller, + repaidAssets: normalizeEnvioString(liquidation.repaidAssets), + seizedAssets: normalizeEnvioString(liquidation.seizedAssets), + timestamp: normalizeEnvioTimestamp(liquidation.timestamp), + type: 'MarketLiquidation' as const, + })), + ); +}; diff --git a/src/data-sources/envio/market-participants.ts b/src/data-sources/envio/market-participants.ts new file mode 100644 index 00000000..74f5c2d8 --- /dev/null +++ b/src/data-sources/envio/market-participants.ts @@ -0,0 +1,170 @@ +import { envioMarketBorrowersQuery, envioMarketSuppliersQuery } from '@/graphql/envio-queries'; +import type { SupportedNetworks } from '@/utils/networks'; +import type { MarketBorrower, MarketSupplier, PaginatedMarketBorrowers, PaginatedMarketSuppliers } from '@/utils/types'; +import { fetchEnvioMarket } from './market'; +import { envioGraphqlFetcher } from './fetchers'; +import { fetchAllEnvioPages, normalizeEnvioString } from './utils'; + +const ENVIO_PARTICIPANTS_PAGE_SIZE = 500; +const ENVIO_PARTICIPANTS_MAX_ITEMS = 1000; +const 
ENVIO_PARTICIPANTS_TIMEOUT_MS = 15_000;

// Raw Hasura rows for supplier / borrower positions.
type EnvioSupplierRow = {
  marketId: string;
  supplyShares: string | number;
  user: string;
};

type EnvioBorrowerRow = {
  borrowShares: string | number;
  collateral: string | number;
  marketId: string;
  user: string;
};

type EnvioParticipantsResponse = {
  data?: {
    Position?: (EnvioSupplierRow | EnvioBorrowerRow)[];
  };
};

/**
 * Converts a share amount into the equivalent asset amount using the market's
 * current assets/shares ratio. Returns '0' for non-positive or malformed
 * inputs instead of throwing.
 */
const toAssets = (shares: string, totalAssets: string, totalShares: string): string => {
  try {
    const parsedShares = BigInt(shares);
    const parsedTotalAssets = BigInt(totalAssets);
    const parsedTotalShares = BigInt(totalShares);

    if (parsedShares <= 0n || parsedTotalAssets <= 0n || parsedTotalShares <= 0n) {
      return '0';
    }

    return ((parsedShares * parsedTotalAssets) / parsedTotalShares).toString();
  } catch {
    return '0';
  }
};

// Fetches a single page of Position rows for the given Hasura where-clause.
const fetchPositionRows = async <TRow>({
  limit,
  offset,
  query,
  where,
}: {
  limit: number;
  offset: number;
  query: string;
  where: Record<string, unknown>;
}): Promise<TRow[]> => {
  const response = await envioGraphqlFetcher<EnvioParticipantsResponse>(
    query,
    { limit, offset, where },
    { timeoutMs: ENVIO_PARTICIPANTS_TIMEOUT_MS },
  );

  return (response.data?.Position ?? []) as TRow[];
};

/**
 * Lists suppliers of a market. Fetches up to ENVIO_PARTICIPANTS_MAX_ITEMS rows
 * from the indexer, then paginates in memory with pageSize/skip, so totalCount
 * is capped at that maximum.
 */
export const fetchEnvioMarketSuppliers = async (
  marketId: string,
  chainId: SupportedNetworks,
  minShares = '0',
  pageSize = 8,
  skip = 0,
): Promise<PaginatedMarketSuppliers> => {
  const where = {
    chainId: { _eq: chainId },
    marketId: { _eq: marketId.toLowerCase() },
    supplyShares: { _gt: minShares },
  };

  const suppliers = await fetchAllEnvioPages<EnvioSupplierRow>({
    fetchPage: async (limit, offset) =>
      fetchPositionRows<EnvioSupplierRow>({ limit, offset, query: envioMarketSuppliersQuery, where }),
    maxItems: ENVIO_PARTICIPANTS_MAX_ITEMS,
    pageSize: ENVIO_PARTICIPANTS_PAGE_SIZE,
  });

  const items: MarketSupplier[] = suppliers.map((supplier) => ({
    supplyShares: normalizeEnvioString(supplier.supplyShares),
    userAddress: supplier.user,
  }));

  return {
    items: items.slice(skip, skip + pageSize),
    totalCount: items.length,
  };
};

/**
 * Lists borrowers of a market. The market's current state is fetched in
 * parallel because it is needed to convert borrow shares into assets.
 */
export const fetchEnvioMarketBorrowers = async (
  marketId: string,
  chainId: SupportedNetworks,
  minShares = '0',
  pageSize = 10,
  skip = 0,
): Promise<PaginatedMarketBorrowers> => {
  const where = {
    borrowShares: { _gt: minShares },
    chainId: { _eq: chainId },
    marketId: { _eq: marketId.toLowerCase() },
  };

  const [market, borrowers] = await Promise.all([
    fetchEnvioMarket(marketId, chainId),
    fetchAllEnvioPages<EnvioBorrowerRow>({
      fetchPage: async (limit, offset) =>
        fetchPositionRows<EnvioBorrowerRow>({ limit, offset, query: envioMarketBorrowersQuery, where }),
      maxItems: ENVIO_PARTICIPANTS_MAX_ITEMS,
      pageSize: ENVIO_PARTICIPANTS_PAGE_SIZE,
    }),
  ]);

  // Without market state the shares→assets conversion is impossible.
  if (!market) {
    return { items: [], totalCount: 0 };
  }

  const items: MarketBorrower[] = borrowers.map((borrower) => {
    const borrowShares = normalizeEnvioString(borrower.borrowShares);

    return {
      borrowAssets: toAssets(borrowShares, market.state.borrowAssets, market.state.borrowShares),
      collateral: normalizeEnvioString(borrower.collateral),
      userAddress: borrower.user,
    };
  });

  return {
    items: items.slice(skip, skip + pageSize),
    totalCount:
items.length,
  };
};

// --- src/data-sources/envio/market.ts ---

import {
  AdaptiveCurveIrmLib,
  Market as BlueMarket,
  MarketParams as BlueMarketParams,
  MarketUtils,
} from '@morpho-org/blue-sdk';
import { type Address, formatUnits, zeroAddress } from 'viem';
import { envioMarketsQuery } from '@/graphql/envio-queries';
import { fetchTokenMetadataMap } from '@/data-sources/shared/token-metadata';
import type { CustomRpcUrls } from '@/stores/useCustomRpc';
import { isForceUnwhitelisted } from '@/utils/markets';
import { getChainScopedMarketKey } from '@/utils/marketIdentity';
import { getMorphoAddress } from '@/utils/morpho';
import { blacklistTokens, infoToKey } from '@/utils/tokens';
import { ALL_SUPPORTED_NETWORKS, type SupportedNetworks, isSupportedChain } from '@/utils/networks';
import type { Market, TokenInfo } from '@/utils/types';
import { isTokenBlacklistedMarket } from '../shared/market-visibility';
import { envioGraphqlFetcher } from './fetchers';

// Raw market row as returned by the Envio/Hasura indexer. Numeric columns may
// arrive as strings or numbers depending on the GraphQL scalar.
type EnvioMarketRow = {
  chainId: number;
  marketId: string;
  loanToken: string;
  collateralToken: string;
  oracle: string;
  irm: string;
  lltv: string | number;
  fee: string | number;
  lastUpdate: string | number;
  rateAtTarget: string | number;
  totalSupplyAssets: string | number;
  totalSupplyShares: string | number;
  totalBorrowAssets: string | number;
  totalBorrowShares: string | number;
};

type EnvioMarketsResponse = {
  data?: {
    Market?: EnvioMarketRow[];
  };
};

const ENVIO_MARKETS_PAGE_SIZE = 1000;
const ENVIO_MARKETS_TIMEOUT_MS = 20_000;

// Falls back to the zero address for missing or non-string values.
const normalizeAddress = (value: string | number | null | undefined): Address => {
  if (typeof value !== 'string') return zeroAddress;
  return (value || zeroAddress) as Address;
};

const normalizeString = (value: string | number | null | undefined): string => {
  if (typeof value === 'string') return value;
  if (typeof value === 'number') return value.toString();
  return '0';
};

const normalizeTimestamp = (value: string | number | null | undefined): number => {
  const normalized = normalizeString(value);
  try {
    return Number(BigInt(normalized));
  } catch {
    return 0;
  }
};

// Non-positive or malformed values fall back to the IRM's initial rate target.
const normalizeRateAtTarget = (value: string | number | null | undefined): bigint => {
  try {
    const parsed = BigInt(normalizeString(value));
    return parsed > 0n ? parsed : AdaptiveCurveIrmLib.INITIAL_RATE_AT_TARGET;
  } catch {
    return AdaptiveCurveIrmLib.INITIAL_RATE_AT_TARGET;
  }
};

// Placeholder metadata for tokens the metadata lookup does not know about.
const toFallbackTokenInfo = (address: string): TokenInfo => {
  return {
    address,
    decimals: 18,
    id: address,
    name: 'Unknown Token',
    symbol: 'Unknown',
  };
};

// Wraps a where-clause so markets with blacklisted loan/collateral tokens are
// excluded server-side.
const withVisibleMarketsFilter = (where: Record<string, unknown>): Record<string, unknown> => {
  return {
    _and: [
      where,
      { collateralToken: { _nin: blacklistTokens } },
      { loanToken: { _nin: blacklistTokens } },
    ],
  };
};

const fetchEnvioMarketsPage = async ({
  limit,
  offset,
  where,
}: {
  limit: number;
  offset: number;
  where: Record<string, unknown>;
}): Promise<EnvioMarketRow[]> => {
  const response = await envioGraphqlFetcher<EnvioMarketsResponse>(
    envioMarketsQuery,
    {
      limit,
      offset,
      where: withVisibleMarketsFilter(where),
    },
    {
      timeoutMs: ENVIO_MARKETS_TIMEOUT_MS,
    },
  );

  return response.data?.Market ??
[];
};

/**
 * Maps one raw indexer row onto the app's Market shape, deriving liquidity,
 * utilization and APYs via the Blue SDK. Returns null for unsupported chains
 * or when numeric fields cannot be parsed.
 */
const buildEnvioMarket = (market: EnvioMarketRow, tokenMetadataMap: Map<string, TokenInfo>): Market | null => {
  if (!isSupportedChain(market.chainId)) {
    return null;
  }

  const chainId = market.chainId as SupportedNetworks;
  const loanTokenAddress = normalizeAddress(market.loanToken);
  const collateralTokenAddress = normalizeAddress(market.collateralToken);
  const oracleAddress = normalizeAddress(market.oracle);
  const irmAddress = normalizeAddress(market.irm);
  const uniqueKey = normalizeString(market.marketId);
  const totalSupplyAssets = normalizeString(market.totalSupplyAssets);
  const totalSupplyShares = normalizeString(market.totalSupplyShares);
  const totalBorrowAssets = normalizeString(market.totalBorrowAssets);
  const totalBorrowShares = normalizeString(market.totalBorrowShares);
  const lastUpdate = normalizeTimestamp(market.lastUpdate);
  const rawFee = normalizeString(market.fee);
  const rateAtTarget = normalizeRateAtTarget(market.rateAtTarget);
  const loanAsset = tokenMetadataMap.get(infoToKey(loanTokenAddress, chainId)) ?? toFallbackTokenInfo(loanTokenAddress);
  const collateralAsset =
    tokenMetadataMap.get(infoToKey(collateralTokenAddress, chainId)) ?? toFallbackTokenInfo(collateralTokenAddress);

  try {
    // FIX: BigInt(rawFee) previously ran before this try block, so one
    // malformed fee crashed the whole batch instead of skipping the row.
    const fee = Number(formatUnits(BigInt(rawFee), 18));
    const blueMarket = new BlueMarket({
      params: new BlueMarketParams({
        loanToken: loanTokenAddress,
        collateralToken: collateralTokenAddress,
        oracle: oracleAddress,
        irm: irmAddress,
        lltv: BigInt(normalizeString(market.lltv)),
      }),
      totalSupplyAssets: BigInt(totalSupplyAssets),
      totalBorrowAssets: BigInt(totalBorrowAssets),
      totalSupplyShares: BigInt(totalSupplyShares),
      totalBorrowShares: BigInt(totalBorrowShares),
      lastUpdate: BigInt(lastUpdate),
      fee: BigInt(rawFee),
      rateAtTarget,
    });

    return {
      id: uniqueKey,
      uniqueKey,
      lltv: normalizeString(market.lltv),
      irmAddress,
      oracleAddress,
      whitelisted: !isForceUnwhitelisted(uniqueKey),
      loanAsset,
      collateralAsset,
      morphoBlue: {
        id: getMorphoAddress(chainId),
        address: getMorphoAddress(chainId),
        chain: {
          id: chainId,
        },
      },
      state: {
        borrowAssets: totalBorrowAssets,
        supplyAssets: totalSupplyAssets,
        borrowShares: totalBorrowShares,
        supplyShares: totalSupplyShares,
        liquidityAssets: blueMarket.liquidity.toString(),
        collateralAssets: '0',
        // USD figures are filled in later by the shared enrichment step.
        borrowAssetsUsd: 0,
        supplyAssetsUsd: 0,
        liquidityAssetsUsd: 0,
        collateralAssetsUsd: null,
        utilization: Number(blueMarket.utilization) / 1e18,
        supplyApy: blueMarket.supplyApy,
        borrowApy: blueMarket.borrowApy,
        fee,
        timestamp: lastUpdate,
        apyAtTarget: MarketUtils.rateToApy(rateAtTarget),
        rateAtTarget: rateAtTarget.toString(),
        dailySupplyApy: null,
        dailyBorrowApy: null,
        weeklySupplyApy: null,
        weeklyBorrowApy: null,
        monthlySupplyApy: null,
        monthlyBorrowApy: null,
      },
      warnings: [],
      hasUSDPrice: false,
      usdPriceSource: 'none',
      realizedBadDebt: {
        underlying: '0',
      },
      supplyingVaults: [],
    };
  } catch (error) {
    console.error(`[envio] Failed to map market ${getChainScopedMarketKey(uniqueKey, chainId)}:`, error);
    return null;
  }
};

// Collects loan/collateral token references for the metadata batch lookup.
const toMarketTokenRefs = (markets: EnvioMarketRow[]): { address: string; chainId:
SupportedNetworks }[] => {
  const tokenRefs: { address: string; chainId: SupportedNetworks }[] = [];

  for (const market of markets) {
    if (!isSupportedChain(market.chainId)) {
      continue;
    }

    const chainId = market.chainId as SupportedNetworks;
    tokenRefs.push(
      { address: normalizeAddress(market.loanToken), chainId },
      { address: normalizeAddress(market.collateralToken), chainId },
    );
  }

  return tokenRefs;
};

// Builds the chain-scoped-key → Market map for a set of raw rows, resolving
// token metadata in one batch first.
const buildEnvioMarketsMap = async (
  rows: EnvioMarketRow[],
  options: { customRpcUrls?: CustomRpcUrls } = {},
): Promise<Map<string, Market>> => {
  const visibleRows = rows.filter((market) => !isTokenBlacklistedMarket(market));
  const tokenMetadataMap = await fetchTokenMetadataMap(toMarketTokenRefs(visibleRows), options.customRpcUrls);
  const marketsByKey = new Map<string, Market>();

  for (const row of visibleRows) {
    const market = buildEnvioMarket(row, tokenMetadataMap);
    if (!market) {
      continue;
    }
    marketsByKey.set(getChainScopedMarketKey(market.uniqueKey, market.morphoBlue.chain.id), market);
  }

  return marketsByKey;
};

/** Fetches every visible market on the given chains, paging until a short page. */
export const fetchEnvioMarkets = async (
  chainIds: SupportedNetworks[] = ALL_SUPPORTED_NETWORKS,
  options: { customRpcUrls?: CustomRpcUrls } = {},
): Promise<Market[]> => {
  const rows: EnvioMarketRow[] = [];

  for (let offset = 0; ; offset += ENVIO_MARKETS_PAGE_SIZE) {
    const page = await fetchEnvioMarketsPage({
      limit: ENVIO_MARKETS_PAGE_SIZE,
      offset,
      where: {
        chainId: { _in: chainIds },
      },
    });
    if (page.length === 0) break;

    rows.push(...page);

    if (page.length < ENVIO_MARKETS_PAGE_SIZE) {
      break;
    }
  }

  return Array.from((await buildEnvioMarketsMap(rows, options)).values());
};

/** Fetches a specific set of markets, keyed by chain-scoped market key. */
export const fetchEnvioMarketsByKeys = async (
  marketRequests: { marketUniqueKey: string; chainId: SupportedNetworks }[],
  options: { customRpcUrls?: CustomRpcUrls } = {},
): Promise<Map<string, Market>> => {
  if (marketRequests.length === 0) {
    return new Map();
  }

  // De-duplicate requests by chain-scoped key before building the _or filter.
  const uniqueRequests = new Map<string, { marketUniqueKey: string; chainId: SupportedNetworks }>();

  for (const marketRequest of marketRequests) {
    uniqueRequests.set(getChainScopedMarketKey(marketRequest.marketUniqueKey, marketRequest.chainId), marketRequest);
  }

  const rows = await fetchEnvioMarketsPage({
    limit: uniqueRequests.size,
    offset: 0,
    where: {
      _or: Array.from(uniqueRequests.values()).map((marketRequest) => ({
        chainId: { _eq: marketRequest.chainId },
        marketId: { _eq: marketRequest.marketUniqueKey.toLowerCase() },
      })),
    },
  });

  // Defensive: keep only rows that were actually requested.
  const requestedKeys = new Set(uniqueRequests.keys());
  const filteredRows = rows.filter((market) =>
    requestedKeys.has(getChainScopedMarketKey(normalizeString(market.marketId), market.chainId)),
  );
  return buildEnvioMarketsMap(filteredRows, options);
};

/** Convenience single-market lookup; null when the market is unknown. */
export const fetchEnvioMarket = async (
  uniqueKey: string,
  chainId: SupportedNetworks,
  options: { customRpcUrls?: CustomRpcUrls } = {},
): Promise<Market | null> => {
  const marketMap = await fetchEnvioMarketsByKeys([{ marketUniqueKey: uniqueKey, chainId }], options);

  return marketMap.get(getChainScopedMarketKey(uniqueKey, chainId)) ??
null; +}; diff --git a/src/data-sources/envio/positions.ts b/src/data-sources/envio/positions.ts new file mode 100644 index 00000000..77f96e1b --- /dev/null +++ b/src/data-sources/envio/positions.ts @@ -0,0 +1,159 @@ +import type { MarketPosition } from '@/utils/types'; +import { ALL_SUPPORTED_NETWORKS, type SupportedNetworks } from '@/utils/networks'; +import { envioPositionForMarketQuery, envioPositionsQuery } from '@/graphql/envio-queries'; +import { fetchEnvioMarket } from './market'; +import { envioGraphqlFetcher } from './fetchers'; + +type EnvioPositionRow = { + chainId: number; + marketId: string; + supplyShares: string | number; + borrowShares: string | number; + collateral: string | number; + user: string; +}; + +type EnvioPositionsResponse = { + data?: { + Position?: EnvioPositionRow[]; + }; +}; + +const ENVIO_POSITIONS_PAGE_SIZE = 1000; +const ENVIO_POSITIONS_TIMEOUT_MS = 20_000; + +const normalizeString = (value: string | number | null | undefined): string => { + if (typeof value === 'string') return value; + if (typeof value === 'number') return value.toString(); + return '0'; +}; + +const toAssets = (shares: string, totalAssets: string, totalShares: string): string => { + const sharesBigInt = BigInt(shares); + const totalAssetsBigInt = BigInt(totalAssets); + const totalSharesBigInt = BigInt(totalShares); + + if (sharesBigInt <= 0n || totalAssetsBigInt <= 0n || totalSharesBigInt <= 0n) { + return '0'; + } + + return ((sharesBigInt * totalAssetsBigInt) / totalSharesBigInt).toString(); +}; + +const buildPositionsWhere = (user: string, chainIds: SupportedNetworks[]) => { + return { + user: { + _eq: user.toLowerCase(), + }, + chainId: { + _in: chainIds, + }, + _or: [ + { + supplyShares: { + _gt: '0', + }, + }, + { + borrowShares: { + _gt: '0', + }, + }, + { + collateral: { + _gt: '0', + }, + }, + ], + }; +}; + +const fetchEnvioPositionsPage = async (user: string, chainIds: SupportedNetworks[], offset: number): Promise => { + const response = await 
envioGraphqlFetcher( + envioPositionsQuery, + { + limit: ENVIO_POSITIONS_PAGE_SIZE, + offset, + where: buildPositionsWhere(user, chainIds), + }, + { + timeoutMs: ENVIO_POSITIONS_TIMEOUT_MS, + }, + ); + + return response.data?.Position ?? []; +}; + +export const fetchEnvioUserPositionMarkets = async ( + userAddress: string, + chainIds: SupportedNetworks[] = ALL_SUPPORTED_NETWORKS, +): Promise<{ marketUniqueKey: string; chainId: number }[]> => { + const positions: EnvioPositionRow[] = []; + + for (let offset = 0; ; offset += ENVIO_POSITIONS_PAGE_SIZE) { + const page = await fetchEnvioPositionsPage(userAddress, chainIds, offset); + if (page.length === 0) break; + + positions.push(...page); + + if (page.length < ENVIO_POSITIONS_PAGE_SIZE) { + break; + } + } + + return positions.map((position) => ({ + marketUniqueKey: normalizeString(position.marketId), + chainId: position.chainId, + })); +}; + +export const fetchEnvioUserPositionForMarket = async ( + marketUniqueKey: string, + userAddress: string, + chainId: SupportedNetworks, +): Promise => { + const response = await envioGraphqlFetcher( + envioPositionForMarketQuery, + { + chainId, + marketId: marketUniqueKey.toLowerCase(), + user: userAddress.toLowerCase(), + }, + { + timeoutMs: ENVIO_POSITIONS_TIMEOUT_MS, + }, + ); + + const position = response.data?.Position?.[0]; + + if (!position) { + return null; + } + + const market = await fetchEnvioMarket(marketUniqueKey, chainId); + + if (!market) { + return null; + } + + const supplyShares = normalizeString(position.supplyShares); + const borrowShares = normalizeString(position.borrowShares); + const collateral = normalizeString(position.collateral); + const supplyAssets = toAssets(supplyShares, market.state.supplyAssets, market.state.supplyShares); + const borrowAssets = toAssets(borrowShares, market.state.borrowAssets, market.state.borrowShares); + + if (supplyAssets === '0' && borrowAssets === '0' && collateral === '0') { + return null; + } + + return { + market, + 
state: {
      supplyShares,
      supplyAssets,
      borrowShares,
      borrowAssets,
      collateral,
    },
  };
};

// --- src/data-sources/envio/transactions.ts ---

import type { TransactionFilters, TransactionResponse } from '@/hooks/queries/fetchUserTransactions';
import { fetchMarketDetails } from '@/data-sources/market-details';
import { fetchEnvioMarketsByKeys } from '@/data-sources/envio/market';
import { getChainScopedMarketKey } from '@/utils/marketIdentity';
import type { SupportedNetworks } from '@/utils/networks';
import { type UserTransaction, UserTxTypes } from '@/utils/types';
import {
  fetchEnvioBorrowRows,
  fetchEnvioLiquidationRows,
  fetchEnvioRepayRows,
  fetchEnvioSupplyCollateralRows,
  fetchEnvioSupplyRows,
  fetchEnvioWithdrawCollateralRows,
  fetchEnvioWithdrawRows,
} from './events';
import { normalizeEnvioString, normalizeEnvioTimestamp } from './utils';

// Newest-first. FIX: sorts a copy instead of mutating the caller's array.
const sortTransactionsByTimestampDescending = (transactions: UserTransaction[]): UserTransaction[] => {
  return [...transactions].sort((left, right) => right.timestamp - left.timestamp);
};

const buildAddressFilter = (addresses: string[]) => ({
  _in: addresses.map((address) => address.toLowerCase()),
});

// Where-clause fragments common to every event table.
const buildSharedWhereClause = (filters: TransactionFilters) => {
  const where: Record<string, unknown> = {
    chainId: { _eq: filters.chainId },
  };

  if (filters.marketUniqueKeys && filters.marketUniqueKeys.length > 0) {
    where.market_id = {
      _in: filters.marketUniqueKeys.map((marketUniqueKey) => marketUniqueKey.toLowerCase()),
    };
  }

  if (filters.timestampGte != null || filters.timestampLte != null) {
    where.timestamp = {
      ...(filters.timestampGte != null ? { _gte: filters.timestampGte } : {}),
      ...(filters.timestampLte != null ? { _lte: filters.timestampLte } : {}),
    };
  }

  if (filters.hash) {
    where.txHash = { _eq: filters.hash.toLowerCase() };
  }

  return where;
};

const buildOnBehalfWhere = (filters: TransactionFilters) => ({
  ...buildSharedWhereClause(filters),
  onBehalf: buildAddressFilter(filters.userAddress),
});

// Liquidations are matched by the liquidated borrower, not onBehalf.
const buildLiquidationWhere = (filters: TransactionFilters) => ({
  ...buildSharedWhereClause(filters),
  borrower: buildAddressFilter(filters.userAddress),
});

/**
 * Keeps only transactions whose relevant asset (collateral for collateral
 * events, loan asset otherwise) is in assetIds. Market metadata comes from
 * Envio first, with a per-market fetchMarketDetails fallback.
 */
const matchesAssetFilter = async ({
  assetIds,
  chainId,
  transactions,
}: {
  assetIds: string[] | undefined;
  chainId: SupportedNetworks;
  transactions: UserTransaction[];
}): Promise<UserTransaction[]> => {
  if (!assetIds || assetIds.length === 0 || transactions.length === 0) {
    return transactions;
  }

  const normalizedAssetIds = new Set(assetIds.map((assetId) => assetId.toLowerCase()));
  const uniqueMarketIds = [...new Set(transactions.map((transaction) => transaction.data.market.uniqueKey.toLowerCase()))];
  const envioMarketMap = await fetchEnvioMarketsByKeys(
    uniqueMarketIds.map((marketId) => ({ chainId, marketUniqueKey: marketId })),
  ).catch(() => new Map());
  const marketMap = new Map<string, NonNullable<Awaited<ReturnType<typeof fetchMarketDetails>>>>();
  const missingMarketIds: string[] = [];

  for (const marketId of uniqueMarketIds) {
    const marketKey = getChainScopedMarketKey(marketId, chainId);
    const envioMarket = envioMarketMap.get(marketKey);

    if (envioMarket) {
      marketMap.set(marketId, envioMarket);
      continue;
    }
    missingMarketIds.push(marketId);
  }

  const fallbackResults = await Promise.allSettled(
    missingMarketIds.map((marketId) => fetchMarketDetails(marketId, chainId, { enrichHistoricalApys: false })),
  );

  for (const [index, result] of fallbackResults.entries()) {
    if (result.status === 'fulfilled' && result.value) {
      marketMap.set(missingMarketIds[index]!, result.value);
    }
  }

  return transactions.filter((transaction) => {
    const market =
marketMap.get(transaction.data.market.uniqueKey.toLowerCase());

    // Unknown market → cannot decide the asset, so exclude conservatively.
    if (!market) {
      return false;
    }

    const isCollateralTransaction =
      transaction.type === UserTxTypes.MarketSupplyCollateral || transaction.type === UserTxTypes.MarketWithdrawCollateral;
    const relevantAsset = isCollateralTransaction ? market.collateralAsset.address : market.loanAsset.address;

    return normalizedAssetIds.has(relevantAsset.toLowerCase());
  });
};

// Builds one UserTransaction from a raw event row. Missing shares normalize
// to '0' via normalizeEnvioString(undefined).
const toUserTransaction = ({
  assets,
  marketId,
  shares,
  timestamp,
  txHash,
  type,
}: {
  assets: string | number;
  marketId: string;
  shares?: string | number;
  timestamp: string | number;
  txHash: string;
  type: UserTxTypes;
}): UserTransaction => ({
  data: {
    __typename: type,
    assets: normalizeEnvioString(assets),
    market: {
      uniqueKey: marketId,
    },
    shares: normalizeEnvioString(shares),
  },
  hash: txHash,
  timestamp: normalizeEnvioTimestamp(timestamp),
  type,
});

// Maps a batch of standard market events (supply/withdraw/borrow/repay and
// collateral moves) to transactions — replaces six duplicated inline maps.
const mapMarketEvents = (
  events: {
    assets: string | number;
    market_id: string;
    shares?: string | number;
    timestamp: string | number;
    txHash: string;
  }[],
  type: UserTxTypes,
): UserTransaction[] =>
  events.map((event) =>
    toUserTransaction({
      assets: event.assets,
      marketId: event.market_id,
      shares: event.shares,
      timestamp: event.timestamp,
      txHash: event.txHash,
      type,
    }),
  );

/**
 * Aggregates every event type for a user into one newest-first transaction
 * list, applies the optional asset filter, then paginates with skip/first.
 */
export const fetchEnvioTransactions = async (filters: TransactionFilters): Promise<TransactionResponse> => {
  const onBehalfWhere = buildOnBehalfWhere(filters);
  const liquidationWhere = buildLiquidationWhere(filters);

  const [supplyEvents, withdrawEvents, borrowEvents, repayEvents, supplyCollateralEvents, withdrawCollateralEvents, liquidations] =
    await Promise.all([
      fetchEnvioSupplyRows(onBehalfWhere),
      fetchEnvioWithdrawRows(onBehalfWhere),
      fetchEnvioBorrowRows(onBehalfWhere),
      fetchEnvioRepayRows(onBehalfWhere),
      fetchEnvioSupplyCollateralRows(onBehalfWhere),
      fetchEnvioWithdrawCollateralRows(onBehalfWhere),
      fetchEnvioLiquidationRows(liquidationWhere),
    ]);

  let items: UserTransaction[] = [
    ...mapMarketEvents(supplyEvents, UserTxTypes.MarketSupply),
    ...mapMarketEvents(withdrawEvents, UserTxTypes.MarketWithdraw),
    ...mapMarketEvents(borrowEvents, UserTxTypes.MarketBorrow),
    ...mapMarketEvents(repayEvents, UserTxTypes.MarketRepay),
    ...mapMarketEvents(supplyCollateralEvents, UserTxTypes.MarketSupplyCollateral),
    ...mapMarketEvents(withdrawCollateralEvents, UserTxTypes.MarketWithdrawCollateral),
    // Liquidations use different column names (repaidAssets/repaidShares).
    ...liquidations.map((event) =>
      toUserTransaction({
        assets: event.repaidAssets,
        marketId: event.market_id,
        shares: event.repaidShares,
        timestamp: event.timestamp,
        txHash: event.txHash,
        type: UserTxTypes.MarketLiquidation,
      }),
    ),
  ];

  items = sortTransactionsByTimestampDescending(items);
  items = await matchesAssetFilter({
    assetIds: filters.assetIds,
    chainId: filters.chainId,
    transactions: items,
  });

  const skip = filters.skip ?? 0;
  const first = filters.first ??
items.length;
  const paginatedItems = items.slice(skip, skip + first);

  return {
    error: null,
    items: paginatedItems,
    pageInfo: {
      count: paginatedItems.length,
      countTotal: items.length,
    },
  };
};

// --- src/data-sources/envio/utils.ts ---

/** Coerces an indexer value (string | number | null) into a decimal string; '0' when absent. */
export const normalizeEnvioString = (value: string | number | null | undefined): string => {
  if (typeof value === 'string') return value;
  if (typeof value === 'number') return value.toString();
  return '0';
};

/** Parses an indexer timestamp into a number; 0 when unparsable. */
export const normalizeEnvioTimestamp = (value: string | number | null | undefined): number => {
  const normalizedValue = normalizeEnvioString(value);

  try {
    return Number(BigInt(normalizedValue));
  } catch {
    return 0;
  }
};

/**
 * Offset-paginates `fetchPage` until an empty page, a short page, or
 * `maxItems` accumulated rows. The final page's limit is trimmed so the
 * result never exceeds `maxItems`.
 */
export const fetchAllEnvioPages = async <T>({
  fetchPage,
  maxItems = 1000,
  pageSize = 500,
}: {
  fetchPage: (limit: number, offset: number) => Promise<T[]>;
  maxItems?: number;
  pageSize?: number;
}): Promise<T[]> => {
  const items: T[] = [];

  for (let offset = 0; offset < maxItems; offset += pageSize) {
    const remainingItems = maxItems - items.length;
    const limit = Math.min(pageSize, remainingItems);
    const page = await fetchPage(limit, offset);

    if (page.length === 0) {
      break;
    }

    items.push(...page);

    if (page.length < limit || items.length >= maxItems) {
      break;
    }
  }

  return items;
};

// --- src/data-sources/market-activity.ts ---

import { hasEnvioIndexer, supportsMorphoApi } from '@/config/dataSources';
import {
  fetchEnvioMarketBorrows,
  fetchEnvioMarketLiquidations,
  fetchEnvioMarketSupplies,
} from '@/data-sources/envio/market-activity';
import { fetchMorphoMarketBorrows } from '@/data-sources/morpho-api/market-borrows';
import { fetchMorphoMarketLiquidations } from '@/data-sources/morpho-api/market-liquidations';
import { fetchMorphoMarketSupplies } from '@/data-sources/morpho-api/market-supplies';
import { fetchSubgraphMarketBorrows } from '@/data-sources/subgraph/market-borrows';
import { fetchSubgraphMarketLiquidations } from '@/data-sources/subgraph/market-liquidations';
import { fetchSubgraphMarketSupplies } from '@/data-sources/subgraph/market-supplies';
import type { SupportedNetworks } from '@/utils/networks';
import type { MarketLiquidationTransaction, PaginatedMarketActivityTransactions } from '@/utils/types';

/** Market supplies with an Envio → Morpho API → subgraph fallback chain. */
export const fetchMarketSupplies = async (
  marketId: string,
  loanAssetId: string,
  network: SupportedNetworks,
  minAssets = '0',
  pageSize = 8,
  skip = 0,
): Promise<PaginatedMarketActivityTransactions> => {
  if (hasEnvioIndexer()) {
    try {
      return await fetchEnvioMarketSupplies(marketId, network, minAssets, pageSize, skip);
    } catch (envioError) {
      // Fall through to the next source.
      console.error('Failed to fetch supplies via Envio:', envioError);
    }
  }

  if (supportsMorphoApi(network)) {
    try {
      return await fetchMorphoMarketSupplies(marketId, minAssets, pageSize, skip);
    } catch (morphoError) {
      console.error('Failed to fetch supplies via Morpho API:', morphoError);
    }
  }

  return fetchSubgraphMarketSupplies(marketId, loanAssetId, network, minAssets, pageSize, skip);
};

/** Market borrows with the same fallback chain. */
export const fetchMarketBorrows = async (
  marketId: string,
  loanAssetId: string,
  network: SupportedNetworks,
  minAssets = '0',
  pageSize = 8,
  skip = 0,
): Promise<PaginatedMarketActivityTransactions> => {
  if (hasEnvioIndexer()) {
    try {
      return await fetchEnvioMarketBorrows(marketId, network, minAssets, pageSize, skip);
    } catch (envioError) {
      console.error('Failed to fetch borrows via Envio:', envioError);
    }
  }

  if (supportsMorphoApi(network)) {
    try {
      return await fetchMorphoMarketBorrows(marketId, minAssets, pageSize, skip);
    } catch (morphoError) {
      console.error('Failed to fetch borrows via Morpho API:', morphoError);
    }
  }

return fetchSubgraphMarketBorrows(marketId, loanAssetId, network, minAssets, pageSize, skip);
};

/** Market liquidations with the same fallback chain. */
export const fetchMarketLiquidations = async (
  marketId: string,
  network: SupportedNetworks,
): Promise<MarketLiquidationTransaction[]> => {
  if (hasEnvioIndexer()) {
    try {
      return await fetchEnvioMarketLiquidations(marketId, network);
    } catch (envioError) {
      console.error('Failed to fetch liquidations via Envio:', envioError);
    }
  }

  if (supportsMorphoApi(network)) {
    try {
      return await fetchMorphoMarketLiquidations(marketId);
    } catch (morphoError) {
      console.error('Failed to fetch liquidations via Morpho API:', morphoError);
    }
  }

  return fetchSubgraphMarketLiquidations(marketId, network);
};

// --- src/data-sources/market-catalog.ts ---

import { hasEnvioIndexer, supportsMorphoApi } from '@/config/dataSources';
import { fetchEnvioMarkets } from '@/data-sources/envio/market';
import { fetchMorphoMarkets, fetchMorphoMarketsMultiChain } from '@/data-sources/morpho-api/market';
import { mergeMarketsByIdentity } from '@/data-sources/shared/market-merge';
import { filterTokenBlacklistedMarkets } from '@/data-sources/shared/market-visibility';
import { enrichMarketsWithHistoricalApysWithinTimeout } from '@/data-sources/shared/market-rate-enrichment';
import { getErrorMessage, logDataSourceEvent } from '@/data-sources/shared/source-debug';
import { fetchSubgraphMarkets } from '@/data-sources/subgraph/market';
import type { CustomRpcUrls } from '@/stores/useCustomRpc';
import { ALL_SUPPORTED_NETWORKS, type SupportedNetworks } from '@/utils/networks';
import type { Market } from '@/utils/types';

const MARKET_ENRICHMENT_TIMEOUT_MS = 8_000;

// Chains that were requested but have no market in the fetched set.
const getMissingChainIds = (chainIds: SupportedNetworks[], markets: Market[]): SupportedNetworks[] => {
  const coveredChainIds = new Set(markets.map((market) => market.morphoBlue.chain.id));
  return chainIds.filter((chainId) => !coveredChainIds.has(chainId));
};

// Per-network legacy fallback: Morpho API where supported, else subgraph.
const fetchMarketsPerNetworkFallback = async (chainIds: SupportedNetworks[]): Promise<Market[]> => {
  const results = await Promise.allSettled(
    chainIds.map(async (network) => {
      if (supportsMorphoApi(network)) {
        try {
          return await fetchMorphoMarkets(network);
        } catch {
          return fetchSubgraphMarkets(network);
        }
      }

      return fetchSubgraphMarkets(network);
    }),
  );

  return filterTokenBlacklistedMarkets(results.flatMap((result) => (result.status === 'fulfilled' ? result.value : [])));
};

/**
 * Fetches the full market catalog: Envio first, per-chain fallbacks for any
 * coverage gaps, then legacy Morpho API / subgraph sources when Envio is
 * unavailable. Results are merged by market identity and APY-enriched within
 * a time budget.
 */
export const fetchMarketCatalog = async (
  chainIds: SupportedNetworks[] = ALL_SUPPORTED_NETWORKS,
  options: { customRpcUrls?: CustomRpcUrls } = {},
): Promise<Market[]> => {
  const { customRpcUrls } = options;

  if (hasEnvioIndexer()) {
    try {
      const envioMarkets = await fetchEnvioMarkets(chainIds, { customRpcUrls });
      const missingChainIds = getMissingChainIds(chainIds, envioMarkets);

      if (missingChainIds.length === 0 && envioMarkets.length > 0) {
        logDataSourceEvent('market-catalog', 'using Envio as primary source', {
          chainIds: chainIds.join(','),
          count: envioMarkets.length,
        });

        return enrichMarketsWithHistoricalApysWithinTimeout(envioMarkets, MARKET_ENRICHMENT_TIMEOUT_MS, customRpcUrls);
      }

      logDataSourceEvent('market-catalog', 'Envio returned incomplete coverage, falling back for missing chains only', {
        requestedChainIds: chainIds.join(','),
        coveredChainIds: [...new Set(envioMarkets.map((market) => market.morphoBlue.chain.id))].join(','),
        missingChainIds: missingChainIds.join(','),
        envioCount: envioMarkets.length,
      });

      const fallbackMarkets = missingChainIds.length > 0 ?
await fetchMarketsPerNetworkFallback(missingChainIds) : []; + const mergedMarkets = mergeMarketsByIdentity([...envioMarkets, ...fallbackMarkets]); + + if (mergedMarkets.length > 0) { + logDataSourceEvent('market-catalog', 'merged Envio with fallback markets', { + fallbackCount: fallbackMarkets.length, + totalCount: mergedMarkets.length, + }); + + return enrichMarketsWithHistoricalApysWithinTimeout(mergedMarkets, MARKET_ENRICHMENT_TIMEOUT_MS, customRpcUrls); + } + } catch (error) { + logDataSourceEvent('market-catalog', 'Envio market catalog failed, using legacy fallback', { + chainIds: chainIds.join(','), + reason: getErrorMessage(error), + }); + } + } else { + logDataSourceEvent('market-catalog', 'Envio endpoint not configured, using legacy sources', { + chainIds: chainIds.join(','), + }); + } + + const morphoSupportedChainIds = chainIds.filter(supportsMorphoApi); + const subgraphOnlyChainIds = chainIds.filter((chainId) => !supportsMorphoApi(chainId)); + const markets: Market[] = []; + + if (morphoSupportedChainIds.length > 0) { + try { + markets.push(...(await fetchMorphoMarketsMultiChain(morphoSupportedChainIds))); + logDataSourceEvent('market-catalog', 'used Morpho API fallback for supported chains', { + chainIds: morphoSupportedChainIds.join(','), + }); + } catch (error) { + logDataSourceEvent('market-catalog', 'Morpho multi-chain fallback failed, retrying per-network fallback', { + chainIds: morphoSupportedChainIds.join(','), + reason: getErrorMessage(error), + }); + markets.push(...(await fetchMarketsPerNetworkFallback(morphoSupportedChainIds))); + } + } + + if (subgraphOnlyChainIds.length > 0) { + const subgraphResults = await Promise.allSettled(subgraphOnlyChainIds.map((network) => fetchSubgraphMarkets(network))); + markets.push(...filterTokenBlacklistedMarkets(subgraphResults.flatMap((result) => (result.status === 'fulfilled' ? 
result.value : [])))); + + logDataSourceEvent('market-catalog', 'used subgraph fallback for non-Morpho chains', { + chainIds: subgraphOnlyChainIds.join(','), + }); + } + + const mergedMarkets = mergeMarketsByIdentity(markets); + + if (mergedMarkets.length > 0) { + return enrichMarketsWithHistoricalApysWithinTimeout(mergedMarkets, MARKET_ENRICHMENT_TIMEOUT_MS, customRpcUrls); + } + + return fetchMarketsPerNetworkFallback(chainIds); +}; diff --git a/src/data-sources/market-details.ts b/src/data-sources/market-details.ts new file mode 100644 index 00000000..fbb17022 --- /dev/null +++ b/src/data-sources/market-details.ts @@ -0,0 +1,106 @@ +import { hasEnvioIndexer, supportsMorphoApi } from '@/config/dataSources'; +import { fetchEnvioMarket } from '@/data-sources/envio/market'; +import { fetchMorphoMarket } from '@/data-sources/morpho-api/market'; +import { isTokenBlacklistedMarket } from '@/data-sources/shared/market-visibility'; +import { fetchSubgraphMarket } from '@/data-sources/subgraph/market'; +import { enrichMarketsWithHistoricalApysWithinTimeout } from '@/data-sources/shared/market-rate-enrichment'; +import { fillMissingMarketUsdValues } from '@/data-sources/shared/market-usd'; +import { getErrorMessage, logDataSourceEvent } from '@/data-sources/shared/source-debug'; +import type { CustomRpcUrls } from '@/stores/useCustomRpc'; +import type { SupportedNetworks } from '@/utils/networks'; +import type { Market } from '@/utils/types'; + +const MARKET_ENRICHMENT_TIMEOUT_MS = 8_000; + +export const fetchMarketDetails = async ( + uniqueKey: string, + network: SupportedNetworks, + options: { + enrichHistoricalApys?: boolean; + customRpcUrls?: CustomRpcUrls; + } = {}, +): Promise => { + const { customRpcUrls, enrichHistoricalApys = false } = options; + let baseMarket: Market | null = null; + + if (hasEnvioIndexer()) { + try { + baseMarket = await fetchEnvioMarket(uniqueKey, network, { + customRpcUrls, + }); + + if (baseMarket) { + logDataSourceEvent('market-details', 
'using Envio market details', { + chainId: network, + marketUniqueKey: uniqueKey, + }); + } + } catch (error) { + logDataSourceEvent('market-details', 'Envio market details failed, falling back', { + chainId: network, + marketUniqueKey: uniqueKey, + reason: getErrorMessage(error), + }); + } + } + + if (!baseMarket && supportsMorphoApi(network)) { + try { + baseMarket = await fetchMorphoMarket(uniqueKey, network); + + if (baseMarket) { + logDataSourceEvent('market-details', 'using Morpho API fallback for market details', { + chainId: network, + marketUniqueKey: uniqueKey, + }); + } + } catch (error) { + logDataSourceEvent('market-details', 'Morpho market details failed, falling back to subgraph', { + chainId: network, + marketUniqueKey: uniqueKey, + reason: getErrorMessage(error), + }); + } + } + + if (!baseMarket) { + try { + baseMarket = await fetchSubgraphMarket(uniqueKey, network); + + if (baseMarket) { + logDataSourceEvent('market-details', 'using subgraph fallback for market details', { + chainId: network, + marketUniqueKey: uniqueKey, + }); + } + } catch (error) { + logDataSourceEvent('market-details', 'subgraph market details failed', { + chainId: network, + marketUniqueKey: uniqueKey, + reason: getErrorMessage(error), + }); + } + } + + if (!baseMarket) { + return null; + } + + if (isTokenBlacklistedMarket(baseMarket)) { + logDataSourceEvent('market-details', 'filtered token-blacklisted market from details view', { + chainId: network, + marketUniqueKey: uniqueKey, + }); + return null; + } + + const [marketWithUsd] = await fillMissingMarketUsdValues([baseMarket]); + baseMarket = marketWithUsd ?? baseMarket; + + if (!enrichHistoricalApys) { + return baseMarket; + } + + const [enrichedMarket] = await enrichMarketsWithHistoricalApysWithinTimeout([baseMarket], MARKET_ENRICHMENT_TIMEOUT_MS, customRpcUrls); + return enrichedMarket ?? 
baseMarket; +}; diff --git a/src/data-sources/market-historical.ts b/src/data-sources/market-historical.ts new file mode 100644 index 00000000..4e7cdd26 --- /dev/null +++ b/src/data-sources/market-historical.ts @@ -0,0 +1,46 @@ +import { hasEnvioIndexer, supportsMorphoApi } from '@/config/dataSources'; +import { fetchEnvioMarketHistoricalData } from '@/data-sources/envio/historical'; +import { fetchMorphoMarketHistoricalData, type HistoricalDataSuccessResult } from '@/data-sources/morpho-api/historical'; +import { fetchSubgraphMarketHistoricalData } from '@/data-sources/subgraph/historical'; +import type { CustomRpcUrls } from '@/stores/useCustomRpc'; +import type { SupportedNetworks } from '@/utils/networks'; +import type { TimeseriesOptions } from '@/utils/types'; + +export const fetchMarketHistoricalData = async ( + uniqueKey: string, + network: SupportedNetworks, + options: TimeseriesOptions, + requestOptions: { + customRpcUrls?: CustomRpcUrls; + } = {}, +): Promise => { + const { customRpcUrls } = requestOptions; + + if (hasEnvioIndexer()) { + try { + const envioData = await fetchEnvioMarketHistoricalData(uniqueKey, network, options, { + customRpcUrls, + }); + + if (envioData) { + return envioData; + } + } catch (envioError) { + console.error('Failed to fetch historical data via Envio:', envioError); + } + } + + if (supportsMorphoApi(network)) { + try { + const morphoData = await fetchMorphoMarketHistoricalData(uniqueKey, network, options); + + if (morphoData) { + return morphoData; + } + } catch (morphoError) { + console.error('Failed to fetch historical data via Morpho API:', morphoError); + } + } + + return fetchSubgraphMarketHistoricalData(uniqueKey, network, options); +}; diff --git a/src/data-sources/market-participants.ts b/src/data-sources/market-participants.ts new file mode 100644 index 00000000..3bdda170 --- /dev/null +++ b/src/data-sources/market-participants.ts @@ -0,0 +1,60 @@ +import { hasEnvioIndexer, supportsMorphoApi } from 
'@/config/dataSources'; +import { fetchEnvioMarketBorrowers, fetchEnvioMarketSuppliers } from '@/data-sources/envio/market-participants'; +import { fetchMorphoMarketBorrowers } from '@/data-sources/morpho-api/market-borrowers'; +import { fetchMorphoMarketSuppliers } from '@/data-sources/morpho-api/market-suppliers'; +import { fetchSubgraphMarketBorrowers } from '@/data-sources/subgraph/market-borrowers'; +import { fetchSubgraphMarketSuppliers } from '@/data-sources/subgraph/market-suppliers'; +import type { SupportedNetworks } from '@/utils/networks'; +import type { PaginatedMarketBorrowers, PaginatedMarketSuppliers } from '@/utils/types'; + +export const fetchMarketBorrowers = async ( + marketId: string, + network: SupportedNetworks, + minShares = '1', + pageSize = 10, + skip = 0, +): Promise => { + if (hasEnvioIndexer()) { + try { + return await fetchEnvioMarketBorrowers(marketId, network, minShares, pageSize, skip); + } catch (envioError) { + console.error('Failed to fetch borrowers via Envio:', envioError); + } + } + + if (supportsMorphoApi(network)) { + try { + return await fetchMorphoMarketBorrowers(marketId, Number(network), minShares, pageSize, skip); + } catch (morphoError) { + console.error('Failed to fetch borrowers via Morpho API:', morphoError); + } + } + + return fetchSubgraphMarketBorrowers(marketId, network, minShares, pageSize, skip); +}; + +export const fetchMarketSuppliers = async ( + marketId: string, + network: SupportedNetworks, + minShares = '1', + pageSize = 8, + skip = 0, +): Promise => { + if (hasEnvioIndexer()) { + try { + return await fetchEnvioMarketSuppliers(marketId, network, minShares, pageSize, skip); + } catch (envioError) { + console.error('Failed to fetch suppliers via Envio:', envioError); + } + } + + if (supportsMorphoApi(network)) { + try { + return await fetchMorphoMarketSuppliers(marketId, Number(network), minShares, pageSize, skip); + } catch (morphoError) { + console.error('Failed to fetch suppliers via Morpho API:', 
morphoError); + } + } + + return fetchSubgraphMarketSuppliers(marketId, network, minShares, pageSize, skip); +}; diff --git a/src/data-sources/morpho-api/market.ts b/src/data-sources/morpho-api/market.ts index 88ade00a..8fe858e5 100644 --- a/src/data-sources/morpho-api/market.ts +++ b/src/data-sources/morpho-api/market.ts @@ -1,5 +1,5 @@ import { marketDetailQuery, marketsQuery } from '@/graphql/morpho-api-queries'; -import type { SupportedNetworks } from '@/utils/networks'; +import { ALL_SUPPORTED_NETWORKS, type SupportedNetworks } from '@/utils/networks'; import { blacklistTokens } from '@/utils/tokens'; import type { Market } from '@/utils/types'; import { morphoGraphqlFetcher } from './fetchers'; @@ -48,6 +48,7 @@ const processMarketData = (market: MorphoApiMarket): Market => { oracleAddress: (oracle?.address ?? zeroAddress) as Address, whitelisted: listed, hasUSDPrice: true, + usdPriceSource: 'direct', }; }; @@ -64,11 +65,19 @@ export const fetchMorphoMarket = async (uniqueKey: string, network: SupportedNet }; const fetchMorphoMarketsPage = async (network: SupportedNetworks, skip: number, pageSize: number): Promise => { + return fetchMorphoMarketsPageForChains([network], skip, pageSize); +}; + +const fetchMorphoMarketsPageForChains = async ( + chainIds: SupportedNetworks[], + skip: number, + pageSize: number, +): Promise => { const variables = { first: pageSize, skip, where: { - chainId_in: [network], + chainId_in: chainIds, }, }; @@ -77,7 +86,7 @@ const fetchMorphoMarketsPage = async (network: SupportedNetworks, skip: number, }); if (!response || !response.data?.markets?.items || !response.data.markets.pageInfo) { - console.warn(`[Markets] Skipping failed page at skip=${skip} for network ${network}`); + console.warn(`[Markets] Skipping failed page at skip=${skip} for chains ${chainIds.join(',')}`); return null; } @@ -91,10 +100,14 @@ const fetchMorphoMarketsPage = async (network: SupportedNetworks, skip: number, // Fetcher for multiple markets from Morpho API 
with pagination export const fetchMorphoMarkets = async (network: SupportedNetworks): Promise => { + return fetchMorphoMarketsMultiChain([network]); +}; + +export const fetchMorphoMarketsMultiChain = async (chainIds: SupportedNetworks[] = ALL_SUPPORTED_NETWORKS): Promise => { const allMarkets: Market[] = []; const pageSize = MORPHO_MARKETS_PAGE_SIZE; - const firstPage = await fetchMorphoMarketsPage(network, 0, pageSize); + const firstPage = await fetchMorphoMarketsPageForChains(chainIds, 0, pageSize); if (!firstPage) { return []; @@ -121,7 +134,7 @@ export const fetchMorphoMarkets = async (network: SupportedNetworks): Promise fetchMorphoMarketsPage(network, skip, pageSize))); + const settledPages = await Promise.allSettled(offsetBatch.map((skip) => fetchMorphoMarketsPageForChains(chainIds, skip, pageSize))); const successfulPages: MorphoMarketsPage[] = []; diff --git a/src/data-sources/position-markets.ts b/src/data-sources/position-markets.ts new file mode 100644 index 00000000..227a9a5a --- /dev/null +++ b/src/data-sources/position-markets.ts @@ -0,0 +1,60 @@ +import { hasEnvioIndexer, supportsMorphoApi } from '@/config/dataSources'; +import { fetchEnvioUserPositionMarkets } from '@/data-sources/envio/positions'; +import { fetchMorphoUserPositionMarkets } from '@/data-sources/morpho-api/positions'; +import { fetchSubgraphUserPositionMarkets } from '@/data-sources/subgraph/positions'; +import { getChainScopedMarketKey } from '@/utils/marketIdentity'; +import { ALL_SUPPORTED_NETWORKS, type SupportedNetworks } from '@/utils/networks'; + +type PositionMarket = { + marketUniqueKey: string; + chainId: number; +}; + +const dedupePositionMarkets = (markets: PositionMarket[]): PositionMarket[] => { + const uniqueMarkets = new Map(); + + for (const market of markets) { + const key = getChainScopedMarketKey(market.marketUniqueKey, market.chainId); + if (!uniqueMarkets.has(key)) { + uniqueMarkets.set(key, market); + } + } + + return Array.from(uniqueMarkets.values()); +}; + 
+const fetchPositionMarketsPerNetworkFallback = async ( + user: string, + chainIds: SupportedNetworks[], +): Promise => { + const results = await Promise.allSettled( + chainIds.map(async (network) => { + if (supportsMorphoApi(network)) { + try { + return await fetchMorphoUserPositionMarkets(user, network); + } catch { + return fetchSubgraphUserPositionMarkets(user, network); + } + } + + return fetchSubgraphUserPositionMarkets(user, network); + }), + ); + + return dedupePositionMarkets(results.flatMap((result) => (result.status === 'fulfilled' ? result.value : []))); +}; + +export const fetchUserPositionMarkets = async ( + user: string, + chainIds: SupportedNetworks[] = ALL_SUPPORTED_NETWORKS, +): Promise => { + if (hasEnvioIndexer()) { + try { + return dedupePositionMarkets(await fetchEnvioUserPositionMarkets(user, chainIds)); + } catch (error) { + console.error('[positions] Envio cross-chain fetch failed, falling back to per-network sources:', error); + } + } + + return fetchPositionMarketsPerNetworkFallback(user, chainIds); +}; diff --git a/src/data-sources/shared/historical-chain-context.ts b/src/data-sources/shared/historical-chain-context.ts new file mode 100644 index 00000000..04060fbb --- /dev/null +++ b/src/data-sources/shared/historical-chain-context.ts @@ -0,0 +1,114 @@ +import type { PublicClient } from 'viem'; +import { fetchBlocksWithTimestamps, type BlockWithTimestamp } from '@/utils/blockEstimation'; +import type { SupportedNetworks } from '@/utils/networks'; + +const CHAIN_CONTEXT_TIMEOUT_MS = 8_000; +const CHAIN_CONTEXT_CACHE_TTL_MS = 15_000; + +type CachedHistoricalChainContext = { + expiresAt: number; + promise: Promise; +}; + +const historicalChainContextCache = new WeakMap>(); + +const withTimeout = async (promise: Promise, timeoutMs: number, fallbackValue: T): Promise => { + let timeoutHandle: ReturnType | null = null; + + const timeoutPromise = new Promise((resolve) => { + timeoutHandle = globalThis.setTimeout(() => resolve(fallbackValue), 
timeoutMs); + }); + + try { + return await Promise.race([promise, timeoutPromise]); + } finally { + if (timeoutHandle) { + globalThis.clearTimeout(timeoutHandle); + } + } +}; + +export type HistoricalChainContext = { + currentBlockNumber: bigint; + currentTimestamp: number; + historicalBlocks: BlockWithTimestamp[]; +}; + +export const fetchHistoricalChainContext = async ({ + chainId, + client, + targetLookbackSeconds, + targetTimestamps, + timeoutMs = CHAIN_CONTEXT_TIMEOUT_MS, +}: { + chainId: SupportedNetworks; + client: PublicClient; + targetLookbackSeconds?: number[]; + targetTimestamps?: number[]; + timeoutMs?: number; +}): Promise => { + const targetSignature = + targetLookbackSeconds && targetLookbackSeconds.length > 0 ? `lookback:${targetLookbackSeconds.join(',')}` : `ts:${(targetTimestamps ?? []).join(',')}`; + const cacheKey = `${chainId}:${targetSignature}`; + const now = Date.now(); + const cachedByClient = historicalChainContextCache.get(client); + const cachedEntry = cachedByClient?.get(cacheKey); + + if (cachedEntry && cachedEntry.expiresAt > now) { + return cachedEntry.promise; + } + + const requestPromise = (async (): Promise => { + const currentBlockNumber = await withTimeout(client.getBlockNumber(), timeoutMs, null); + + if (currentBlockNumber == null) { + return null; + } + + const currentBlock = await withTimeout(client.getBlock({ blockNumber: currentBlockNumber }), timeoutMs, null); + + if (!currentBlock) { + return null; + } + + const currentTimestamp = Number(currentBlock.timestamp); + const resolvedTargetTimestamps = + targetLookbackSeconds && targetLookbackSeconds.length > 0 + ? targetLookbackSeconds.map((seconds) => currentTimestamp - seconds) + : (targetTimestamps ?? 
[]); + const historicalBlocks = await withTimeout( + fetchBlocksWithTimestamps(client, chainId, resolvedTargetTimestamps, Number(currentBlockNumber), currentTimestamp), + timeoutMs, + [], + ); + + if (historicalBlocks.length !== resolvedTargetTimestamps.length) { + return null; + } + + return { + currentBlockNumber, + currentTimestamp, + historicalBlocks, + }; + })(); + + const nextCachedByClient = cachedByClient ?? new Map(); + nextCachedByClient.set(cacheKey, { + expiresAt: now + CHAIN_CONTEXT_CACHE_TTL_MS, + promise: requestPromise, + }); + historicalChainContextCache.set(client, nextCachedByClient); + + return requestPromise.then((result) => { + if (result) { + return result; + } + + historicalChainContextCache.get(client)?.delete(cacheKey); + return null; + }).catch((error) => { + historicalChainContextCache.get(client)?.delete(cacheKey); + throw error; + }); +}; diff --git a/src/data-sources/shared/market-merge.ts b/src/data-sources/shared/market-merge.ts new file mode 100644 index 00000000..ba748436 --- /dev/null +++ b/src/data-sources/shared/market-merge.ts @@ -0,0 +1,50 @@ +import { getChainScopedMarketKey } from '@/utils/marketIdentity'; +import type { Market } from '@/utils/types'; + +export const mergeMarketState = (baseMarket: Market, overlayMarket: Market): Market => { + return { + ...baseMarket, + lltv: overlayMarket.lltv || baseMarket.lltv, + irmAddress: overlayMarket.irmAddress || baseMarket.irmAddress, + oracleAddress: overlayMarket.oracleAddress || baseMarket.oracleAddress, + state: { + ...baseMarket.state, + borrowAssets: overlayMarket.state.borrowAssets, + supplyAssets: overlayMarket.state.supplyAssets, + borrowShares: overlayMarket.state.borrowShares, + supplyShares: overlayMarket.state.supplyShares, + liquidityAssets: overlayMarket.state.liquidityAssets, + utilization: overlayMarket.state.utilization, + supplyApy: overlayMarket.state.supplyApy, + borrowApy: overlayMarket.state.borrowApy, + fee: overlayMarket.state.fee, + timestamp: 
overlayMarket.state.timestamp, + apyAtTarget: overlayMarket.state.apyAtTarget, + rateAtTarget: overlayMarket.state.rateAtTarget, + dailySupplyApy: overlayMarket.state.dailySupplyApy ?? baseMarket.state.dailySupplyApy, + dailyBorrowApy: overlayMarket.state.dailyBorrowApy ?? baseMarket.state.dailyBorrowApy, + weeklySupplyApy: overlayMarket.state.weeklySupplyApy ?? baseMarket.state.weeklySupplyApy, + weeklyBorrowApy: overlayMarket.state.weeklyBorrowApy ?? baseMarket.state.weeklyBorrowApy, + monthlySupplyApy: overlayMarket.state.monthlySupplyApy ?? baseMarket.state.monthlySupplyApy, + monthlyBorrowApy: overlayMarket.state.monthlyBorrowApy ?? baseMarket.state.monthlyBorrowApy, + }, + }; +}; + +export const mergeMarketsByIdentity = (markets: Market[]): Market[] => { + const merged = new Map(); + + for (const market of markets) { + const key = getChainScopedMarketKey(market.uniqueKey, market.morphoBlue.chain.id); + const existing = merged.get(key); + + if (!existing) { + merged.set(key, market); + continue; + } + + merged.set(key, mergeMarketState(existing, market)); + } + + return Array.from(merged.values()); +}; diff --git a/src/data-sources/shared/market-rate-enrichment.ts b/src/data-sources/shared/market-rate-enrichment.ts new file mode 100644 index 00000000..8a7798ff --- /dev/null +++ b/src/data-sources/shared/market-rate-enrichment.ts @@ -0,0 +1,412 @@ +import type { PublicClient } from 'viem'; +import morphoAbi from '@/abis/morpho'; +import { computeAnnualizedApyFromGrowth } from '@/hooks/leverage/math'; +import type { CustomRpcUrls } from '@/stores/useCustomRpc'; +import { getChainScopedMarketKey } from '@/utils/marketIdentity'; +import { getMorphoAddress } from '@/utils/morpho'; +import type { SupportedNetworks } from '@/utils/networks'; +import { getClient } from '@/utils/rpc'; +import type { Market } from '@/utils/types'; +import { fetchHistoricalChainContext } from './historical-chain-context'; +import { filterTokenBlacklistedMarkets } from 
'./market-visibility'; + +const DAY_IN_SECONDS = 24 * 60 * 60; +const LOOKBACK_WINDOWS = [ + { + borrowField: 'dailyBorrowApy', + seconds: DAY_IN_SECONDS, + supplyField: 'dailySupplyApy', + }, + { + borrowField: 'weeklyBorrowApy', + seconds: 7 * DAY_IN_SECONDS, + supplyField: 'weeklySupplyApy', + }, + { + borrowField: 'monthlyBorrowApy', + seconds: 30 * DAY_IN_SECONDS, + supplyField: 'monthlySupplyApy', + }, +] as const; +const INDEX_SCALE = 10n ** 18n; +const DEFAULT_MULTICALL_CHUNK_SIZE = 125; +const LARGE_MARKET_MULTICALL_CHUNK_SIZE = 500; +const BASE_MULTICALL_CHUNK_SIZE = 200; +const LARGE_MARKET_COUNT_THRESHOLD = 500; +const HISTORICAL_MULTICALL_PARALLEL_BATCHES = 2; +const CHAIN_ENRICHMENT_TIMEOUT_MS = 8_000; +const LARGE_CHAIN_ENRICHMENT_TIMEOUT_MS = 12_000; + +type LookbackWindow = (typeof LOOKBACK_WINDOWS)[number]; + +type MarketContractState = { + totalBorrowAssets: bigint; + totalBorrowShares: bigint; + totalSupplyAssets: bigint; + totalSupplyShares: bigint; +}; + +type HistoricalApyEnrichmentOptions = { + customRpcUrls?: CustomRpcUrls; + timeoutMs?: number; +}; + +const pendingChainHistoricalEnrichment = new Map>(); + +const withTimeout = async (promise: Promise, timeoutMs: number, fallbackValue: T): Promise => { + let timeoutHandle: ReturnType | null = null; + + const timeoutPromise = new Promise((resolve) => { + timeoutHandle = globalThis.setTimeout(() => resolve(fallbackValue), timeoutMs); + }); + + try { + return await Promise.race([promise, timeoutPromise]); + } finally { + if (timeoutHandle) { + globalThis.clearTimeout(timeoutHandle); + } + } +}; + +const asBigIntArray = (value: unknown): readonly bigint[] | null => { + if (!Array.isArray(value)) return null; + if (!value.every((entry) => typeof entry === 'bigint')) return null; + return value as readonly bigint[]; +}; + +const parseContractState = (value: unknown): MarketContractState | null => { + const result = asBigIntArray(value); + + if (!result || result.length < 4) { + return null; + } + 
+ return { + totalSupplyAssets: result[0], + totalSupplyShares: result[1], + totalBorrowAssets: result[2], + totalBorrowShares: result[3], + }; +}; + +const getRemainingTimeMs = (deadlineMs: number): number => { + return Math.max(0, deadlineMs - Date.now()); +}; + +const getHistoricalMulticallChunkSize = (chainId: SupportedNetworks, marketCount: number): number => { + if (chainId === 8453) { + return Math.min(BASE_MULTICALL_CHUNK_SIZE, marketCount); + } + + return marketCount > LARGE_MARKET_COUNT_THRESHOLD ? LARGE_MARKET_MULTICALL_CHUNK_SIZE : DEFAULT_MULTICALL_CHUNK_SIZE; +}; + +const getHistoricalEnrichmentTimeoutMs = (marketCount: number, requestedTimeoutMs: number): number => { + if (marketCount > LARGE_MARKET_COUNT_THRESHOLD) { + return Math.max(requestedTimeoutMs, LARGE_CHAIN_ENRICHMENT_TIMEOUT_MS); + } + + return requestedTimeoutMs; +}; + +const toScaledIndex = (totalAssets: bigint | null, totalShares: bigint | null): bigint | null => { + if (!totalAssets || !totalShares || totalAssets <= 0n || totalShares <= 0n) { + return null; + } + + return (totalAssets * INDEX_SCALE) / totalShares; +}; + +const getHistoricalSupplyApy = ( + currentState: MarketContractState, + pastState: MarketContractState, + periodSeconds: number, +): number | null => { + const currentSupplyIndex = toScaledIndex(currentState.totalSupplyAssets, currentState.totalSupplyShares); + const pastSupplyIndex = toScaledIndex(pastState.totalSupplyAssets, pastState.totalSupplyShares); + + if (!currentSupplyIndex || !pastSupplyIndex) { + return null; + } + + return computeAnnualizedApyFromGrowth({ + currentValue: currentSupplyIndex, + pastValue: pastSupplyIndex, + periodSeconds, + }); +}; + +const getHistoricalBorrowApy = ( + currentState: MarketContractState, + pastState: MarketContractState, + periodSeconds: number, +): number | null => { + const currentBorrowIndex = toScaledIndex(currentState.totalBorrowAssets, currentState.totalBorrowShares); + const pastBorrowIndex = 
toScaledIndex(pastState.totalBorrowAssets, pastState.totalBorrowShares); + + if (!currentBorrowIndex || !pastBorrowIndex) { + return null; + } + + return computeAnnualizedApyFromGrowth({ + currentValue: currentBorrowIndex, + pastValue: pastBorrowIndex, + periodSeconds, + }); +}; + +const fetchHistoricalStatesForWindow = async ( + client: PublicClient, + chainId: SupportedNetworks, + markets: Market[], + blockNumber: bigint | undefined, + chunkSize: number, +): Promise> => { + const states = new Map(); + const morphoAddress = getMorphoAddress(chainId); + const marketBatches: Market[][] = []; + + for (let index = 0; index < markets.length; index += chunkSize) { + marketBatches.push(markets.slice(index, index + chunkSize)); + } + + for (let index = 0; index < marketBatches.length; index += HISTORICAL_MULTICALL_PARALLEL_BATCHES) { + const batchGroup = marketBatches.slice(index, index + HISTORICAL_MULTICALL_PARALLEL_BATCHES); + const settledResults = await Promise.allSettled( + batchGroup.map(async (marketBatch) => { + const contracts = marketBatch.map((market) => ({ + abi: morphoAbi, + address: morphoAddress as `0x${string}`, + args: [market.uniqueKey as `0x${string}`], + functionName: 'market' as const, + })); + + const results = await client.multicall({ + allowFailure: true, + blockNumber, + contracts, + }); + + return { + marketBatch, + results, + }; + }), + ); + + for (const settledResult of settledResults) { + if (settledResult.status !== 'fulfilled') { + continue; + } + + const { marketBatch, results } = settledResult.value; + + for (const [resultIndex, result] of results.entries()) { + const market = marketBatch[resultIndex]; + if (result.status !== 'success') { + continue; + } + + const parsedState = parseContractState(result.result); + if (!parsedState) { + continue; + } + + states.set(getChainScopedMarketKey(market.uniqueKey, chainId), parsedState); + } + } + } + + return states; +}; + +const applyHistoricalWindow = ( + market: Market, + window: 
LookbackWindow, + periodSeconds: number, + currentStates: Map, + historicalStates: Map, +): Market => { + const stateKey = getChainScopedMarketKey(market.uniqueKey, market.morphoBlue.chain.id); + const currentState = currentStates.get(stateKey); + const pastState = historicalStates.get(stateKey); + + if (!currentState || !pastState || periodSeconds <= 0) { + return market; + } + + const nextState = { ...market.state }; + + if (nextState[window.supplyField] == null) { + nextState[window.supplyField] = getHistoricalSupplyApy(currentState, pastState, periodSeconds); + } + + if (nextState[window.borrowField] == null) { + nextState[window.borrowField] = getHistoricalBorrowApy(currentState, pastState, periodSeconds); + } + + return { + ...market, + state: nextState, + }; +}; + +const enrichChainMarkets = async ( + chainId: SupportedNetworks, + markets: Market[], + options: HistoricalApyEnrichmentOptions = {}, +): Promise => { + if (markets.length === 0) { + return markets; + } + + const customRpcKey = options.customRpcUrls?.[chainId] ?? 'default'; + const marketKey = [...new Set(markets.map((market) => market.uniqueKey))].sort().join(','); + const pendingKey = `${chainId}:${customRpcKey}:${marketKey}`; + const pendingRequest = pendingChainHistoricalEnrichment.get(pendingKey); + + if (pendingRequest) { + return pendingRequest; + } + + const requestPromise = (async (): Promise => { + const requestedTimeoutMs = options.timeoutMs ?? 
CHAIN_ENRICHMENT_TIMEOUT_MS; + const timeoutMs = getHistoricalEnrichmentTimeoutMs(markets.length, requestedTimeoutMs); + const chunkSize = getHistoricalMulticallChunkSize(chainId, markets.length); + const deadlineMs = Date.now() + timeoutMs; + const client = getClient(chainId, options.customRpcUrls?.[chainId]); + const chainContext = await fetchHistoricalChainContext({ + chainId, + client, + targetLookbackSeconds: LOOKBACK_WINDOWS.map((window) => window.seconds), + timeoutMs, + }); + + if (!chainContext) { + return markets; + } + + const { currentTimestamp, historicalBlocks } = chainContext; + const currentStates = await withTimeout( + fetchHistoricalStatesForWindow(client, chainId, markets, chainContext.currentBlockNumber, chunkSize), + getRemainingTimeMs(deadlineMs), + null, + ); + + if (!currentStates || currentStates.size === 0) { + return markets; + } + + let enrichedMarkets = markets; + + for (const [index, window] of LOOKBACK_WINDOWS.entries()) { + const remainingTimeMs = getRemainingTimeMs(deadlineMs); + + if (remainingTimeMs <= 0) { + break; + } + + const block = historicalBlocks[index]; + const periodSeconds = currentTimestamp - block.timestamp; + + if (periodSeconds <= 0) { + continue; + } + + const historicalStates = await withTimeout( + fetchHistoricalStatesForWindow(client, chainId, enrichedMarkets, BigInt(block.blockNumber), chunkSize), + remainingTimeMs, + null, + ); + + if (!historicalStates || historicalStates.size === 0) { + continue; + } + + enrichedMarkets = enrichedMarkets.map((market) => + applyHistoricalWindow(market, window, periodSeconds, currentStates, historicalStates), + ); + } + + return enrichedMarkets; + })(); + + pendingChainHistoricalEnrichment.set(pendingKey, requestPromise); + + try { + return await requestPromise; + } finally { + pendingChainHistoricalEnrichment.delete(pendingKey); + } +}; + +export const marketNeedsHistoricalApyEnrichment = (market: Market): boolean => { + return ( + market.state.dailySupplyApy == null || + 
market.state.dailyBorrowApy == null || + market.state.weeklySupplyApy == null || + market.state.weeklyBorrowApy == null || + market.state.monthlySupplyApy == null || + market.state.monthlyBorrowApy == null + ); +}; + +export const enrichMarketsWithHistoricalApys = async ( + markets: Market[], + options: HistoricalApyEnrichmentOptions = {}, +): Promise => { + const visibleMarkets = filterTokenBlacklistedMarkets(markets); + const marketsByChain = new Map(); + + for (const market of visibleMarkets) { + if (!marketNeedsHistoricalApyEnrichment(market)) { + continue; + } + + const chainMarkets = marketsByChain.get(market.morphoBlue.chain.id) ?? []; + chainMarkets.push(market); + marketsByChain.set(market.morphoBlue.chain.id, chainMarkets); + } + + if (marketsByChain.size === 0) { + return markets; + } + + const chainResults = await Promise.allSettled( + Array.from(marketsByChain.entries()).map(async ([chainId, chainMarkets]) => { + return { + chainId, + markets: await enrichChainMarkets(chainId, chainMarkets, options), + }; + }), + ); + + const enrichedByKey = new Map(); + + for (const result of chainResults) { + if (result.status !== 'fulfilled') { + continue; + } + + for (const market of result.value.markets) { + enrichedByKey.set(getChainScopedMarketKey(market.uniqueKey, market.morphoBlue.chain.id), market); + } + } + + return markets.map((market) => { + const enrichedMarket = enrichedByKey.get(getChainScopedMarketKey(market.uniqueKey, market.morphoBlue.chain.id)); + return enrichedMarket ?? 
market; + }); +}; + +export const enrichMarketsWithHistoricalApysWithinTimeout = async ( + markets: Market[], + timeoutMs: number, + customRpcUrls?: CustomRpcUrls, +): Promise => { + return enrichMarketsWithHistoricalApys(markets, { + customRpcUrls, + timeoutMs, + }); +}; diff --git a/src/data-sources/shared/market-usd.ts b/src/data-sources/shared/market-usd.ts new file mode 100644 index 00000000..ca918bdc --- /dev/null +++ b/src/data-sources/shared/market-usd.ts @@ -0,0 +1,157 @@ +import { fetchTokenPrices, getTokenPriceKey, type TokenPriceInput } from '@/data-sources/morpho-api/prices'; +import { formatBalance } from '@/utils/balance'; +import type { Market, MarketUsdPriceSource } from '@/utils/types'; + +const hasPositiveAssets = (value?: string): boolean => { + if (!value) return false; + + try { + return BigInt(value) > 0n; + } catch { + return false; + } +}; + +const isFiniteNumber = (value: number | null | undefined): value is number => { + return value !== null && value !== undefined && Number.isFinite(value); +}; + +const isFinitePositiveNumber = (value: number | undefined): value is number => { + return value !== undefined && Number.isFinite(value) && value > 0; +}; + +export const shouldComputeUsd = (usdValue: number | null | undefined, assets?: string): boolean => { + if (!isFiniteNumber(usdValue)) return hasPositiveAssets(assets); + if (usdValue === 0 && hasPositiveAssets(assets)) return true; + return false; +}; + +const computeUsdValue = (assets: string, decimals: number, price: number): number => { + return formatBalance(assets, decimals) * price; +}; + +export const collectTokenPriceInputsForMarkets = (markets: Market[]): TokenPriceInput[] => { + if (markets.length === 0) { + return []; + } + + const tokens: TokenPriceInput[] = []; + const seen = new Set(); + + const addToken = (address: string, chainId: number) => { + const key = getTokenPriceKey(address, chainId); + if (seen.has(key)) return; + seen.add(key); + tokens.push({ address, chainId }); 
+ }; + + for (const market of markets) { + const chainId = market.morphoBlue.chain.id; + const needsLoanUsd = + shouldComputeUsd(market.state.supplyAssetsUsd, market.state.supplyAssets) || + shouldComputeUsd(market.state.borrowAssetsUsd, market.state.borrowAssets) || + shouldComputeUsd(market.state.liquidityAssetsUsd, market.state.liquidityAssets); + const needsCollateralUsd = shouldComputeUsd(market.state.collateralAssetsUsd ?? null, market.state.collateralAssets); + + if (needsLoanUsd) { + addToken(market.loanAsset.address, chainId); + } + + if (needsCollateralUsd) { + addToken(market.collateralAsset.address, chainId); + } + } + + return tokens; +}; + +export const applyTokenPricesToMarkets = (markets: Market[], tokenPrices: Map): Market[] => { + const directPriceSources = new Map(); + + tokenPrices.forEach((_price, key) => { + directPriceSources.set(key, 'direct'); + }); + + return applyTokenPriceResolutionToMarkets(markets, tokenPrices, directPriceSources); +}; + +export const applyTokenPriceResolutionToMarkets = ( + markets: Market[], + tokenPrices: Map, + tokenPriceSources: Map, +): Market[] => { + if (markets.length === 0 || tokenPrices.size === 0) { + return markets; + } + + return markets.map((market) => { + const chainId = market.morphoBlue.chain.id; + const loanPriceKey = getTokenPriceKey(market.loanAsset.address, chainId); + const collateralPriceKey = getTokenPriceKey(market.collateralAsset.address, chainId); + const loanPrice = tokenPrices.get(loanPriceKey); + const collateralPrice = tokenPrices.get(collateralPriceKey); + const loanPriceSource = + tokenPriceSources.get(loanPriceKey) ?? (isFinitePositiveNumber(loanPrice) ? 
'direct' : undefined); + + let nextState = market.state; + let changed = false; + + if (loanPrice !== undefined && Number.isFinite(loanPrice)) { + if (shouldComputeUsd(nextState.supplyAssetsUsd, nextState.supplyAssets)) { + nextState = { ...nextState, supplyAssetsUsd: computeUsdValue(nextState.supplyAssets, market.loanAsset.decimals, loanPrice) }; + changed = true; + } + + if (shouldComputeUsd(nextState.borrowAssetsUsd, nextState.borrowAssets)) { + nextState = { ...nextState, borrowAssetsUsd: computeUsdValue(nextState.borrowAssets, market.loanAsset.decimals, loanPrice) }; + changed = true; + } + + if (shouldComputeUsd(nextState.liquidityAssetsUsd, nextState.liquidityAssets)) { + nextState = { + ...nextState, + liquidityAssetsUsd: computeUsdValue(nextState.liquidityAssets, market.loanAsset.decimals, loanPrice), + }; + changed = true; + } + } + + if ( + collateralPrice !== undefined && + Number.isFinite(collateralPrice) && + shouldComputeUsd(nextState.collateralAssetsUsd ?? null, nextState.collateralAssets) + ) { + nextState = { + ...nextState, + collateralAssetsUsd: computeUsdValue(nextState.collateralAssets, market.collateralAsset.decimals, collateralPrice), + }; + changed = true; + } + + const nextHasUsdPrice = market.hasUSDPrice || loanPriceSource === 'direct'; + const nextUsdPriceSource = + market.usdPriceSource === 'direct' ? 'direct' : (loanPriceSource ?? 
market.usdPriceSource); + + if (!changed && market.hasUSDPrice === nextHasUsdPrice && market.usdPriceSource === nextUsdPriceSource) { + return market; + } + + return { + ...market, + hasUSDPrice: nextHasUsdPrice, + usdPriceSource: nextUsdPriceSource, + state: nextState, + }; + }); +}; + +export const fillMissingMarketUsdValues = async (markets: Market[]): Promise => { + const tokenInputs = collectTokenPriceInputsForMarkets(markets); + + if (tokenInputs.length === 0) { + return markets; + } + + const tokenPrices = await fetchTokenPrices(tokenInputs); + return applyTokenPricesToMarkets(markets, tokenPrices); +}; diff --git a/src/data-sources/shared/market-visibility.ts b/src/data-sources/shared/market-visibility.ts new file mode 100644 index 00000000..606a22b8 --- /dev/null +++ b/src/data-sources/shared/market-visibility.ts @@ -0,0 +1,29 @@ +import { blacklistTokens } from '@/utils/tokens'; +import type { Market } from '@/utils/types'; + +type MarketAddressShape = { + collateralToken?: string | null; + loanToken?: string | null; +}; + +const normalizeAddress = (address: string | null | undefined): string => { + return address?.toLowerCase() ?? ''; +}; + +export const isBlacklistedTokenAddress = (address: string | null | undefined): boolean => { + const normalizedAddress = normalizeAddress(address); + return normalizedAddress.length > 0 && blacklistTokens.includes(normalizedAddress); +}; + +export const isTokenBlacklistedMarket = ( + market: Pick | MarketAddressShape, +): boolean => { + const collateralAddress = 'collateralAsset' in market ? market.collateralAsset?.address : market.collateralToken; + const loanAddress = 'loanAsset' in market ? 
market.loanAsset?.address : market.loanToken; + + return isBlacklistedTokenAddress(collateralAddress) || isBlacklistedTokenAddress(loanAddress); +}; + +export const filterTokenBlacklistedMarkets = >(markets: T[]): T[] => { + return markets.filter((market) => !isTokenBlacklistedMarket(market)); +}; diff --git a/src/data-sources/shared/source-debug.ts b/src/data-sources/shared/source-debug.ts new file mode 100644 index 00000000..6d2c1ec6 --- /dev/null +++ b/src/data-sources/shared/source-debug.ts @@ -0,0 +1,42 @@ +const shouldLogDataSourceEvents = (): boolean => { + return process.env.NODE_ENV !== 'production' || process.env.NEXT_PUBLIC_DEBUG_DATA_SOURCES === 'true'; +}; + +const formatDetails = (details: Record | undefined): Record | undefined => { + if (!details) { + return undefined; + } + + return Object.fromEntries(Object.entries(details).filter(([, value]) => value !== undefined)); +}; + +export const logDataSourceEvent = ( + scope: string, + message: string, + details?: Record, +): void => { + if (!shouldLogDataSourceEvents()) { + return; + } + + const formattedDetails = formatDetails(details); + + if (formattedDetails) { + console.info(`[data-source:${scope}] ${message}`, formattedDetails); + return; + } + + console.info(`[data-source:${scope}] ${message}`); +}; + +export const getErrorMessage = (error: unknown): string => { + if (error instanceof Error) { + return error.message; + } + + if (typeof error === 'string') { + return error; + } + + return 'Unknown error'; +}; diff --git a/src/data-sources/shared/token-metadata.ts b/src/data-sources/shared/token-metadata.ts new file mode 100644 index 00000000..3d523d47 --- /dev/null +++ b/src/data-sources/shared/token-metadata.ts @@ -0,0 +1,198 @@ +import { type Address, erc20Abi } from 'viem'; +import type { CustomRpcUrls } from '@/stores/useCustomRpc'; +import type { SupportedNetworks } from '@/utils/networks'; +import { getClient } from '@/utils/rpc'; +import { infoToKey } from '@/utils/tokens'; +import { 
fetchMergedTokenCatalog, findTokenInCatalog, getLocalTokenCatalog } from '@/utils/tokenCatalog'; +import type { TokenInfo } from '@/utils/types'; + +const DEFAULT_TOKEN_DECIMALS = 18; +const UNKNOWN_TOKEN_NAME = 'Unknown Token'; + +const resolvedTokenMetadataCache = new Map(); +const pendingTokenMetadataCache = new Map>(); + +type DeferredTokenInfo = { + promise: Promise; + resolve: (value: TokenInfo) => void; +}; + +const createFallbackTokenInfo = ( + address: string, + metadata?: Partial>, +): TokenInfo => { + return { + address, + decimals: metadata?.decimals ?? DEFAULT_TOKEN_DECIMALS, + id: address, + name: metadata?.name ?? UNKNOWN_TOKEN_NAME, + symbol: metadata?.symbol ?? 'Unknown', + }; +}; + +const toTokenInfoFromCatalog = (address: string, token: Awaited>[number]): TokenInfo => { + return { + address, + decimals: token.decimals, + id: address, + name: token.symbol, + symbol: token.symbol, + }; +}; + +const dedupeTokenRefs = (tokenRefs: { address: string; chainId: SupportedNetworks }[]): { address: string; chainId: SupportedNetworks }[] => { + const uniqueTokenRefs = new Map(); + + for (const tokenRef of tokenRefs) { + uniqueTokenRefs.set(infoToKey(tokenRef.address, tokenRef.chainId), tokenRef); + } + + return Array.from(uniqueTokenRefs.values()); +}; + +const createDeferredTokenInfo = (): DeferredTokenInfo => { + let resolve!: (value: TokenInfo) => void; + const promise = new Promise((innerResolve) => { + resolve = innerResolve; + }); + + return { promise, resolve }; +}; + +export const fetchTokenMetadataMap = async ( + tokenRefs: { address: string; chainId: SupportedNetworks }[], + customRpcUrls?: CustomRpcUrls, +): Promise> => { + const metadataMap = new Map(); + + if (tokenRefs.length === 0) { + return metadataMap; + } + + const dedupedTokenRefs = dedupeTokenRefs(tokenRefs); + const tokenCatalog = await fetchMergedTokenCatalog().catch(() => getLocalTokenCatalog()); + const unresolvedByChain = new Map(); + const pendingEntries: Array<{ key: string; 
promise: Promise }> = []; + + for (const tokenRef of dedupedTokenRefs) { + const key = infoToKey(tokenRef.address, tokenRef.chainId); + const cachedTokenInfo = resolvedTokenMetadataCache.get(key); + + if (cachedTokenInfo) { + metadataMap.set(key, cachedTokenInfo); + continue; + } + + const catalogToken = findTokenInCatalog(tokenCatalog, tokenRef.address, tokenRef.chainId); + + if (catalogToken) { + const tokenInfo = toTokenInfoFromCatalog(tokenRef.address, catalogToken); + resolvedTokenMetadataCache.set(key, tokenInfo); + metadataMap.set(key, tokenInfo); + continue; + } + + const pendingTokenInfo = pendingTokenMetadataCache.get(key); + + if (pendingTokenInfo) { + pendingEntries.push({ key, promise: pendingTokenInfo }); + continue; + } + + const chainAddresses = unresolvedByChain.get(tokenRef.chainId) ?? []; + chainAddresses.push(tokenRef.address); + unresolvedByChain.set(tokenRef.chainId, chainAddresses); + } + + await Promise.all( + Array.from(unresolvedByChain.entries()).map(async ([chainId, addresses]) => { + const uniqueAddresses = [...new Set(addresses)]; + + if (uniqueAddresses.length === 0) { + return; + } + + const client = getClient(chainId, customRpcUrls?.[chainId]); + const deferredByKey = new Map(); + + for (const address of uniqueAddresses) { + const key = infoToKey(address, chainId); + const deferred = createDeferredTokenInfo(); + deferredByKey.set(key, deferred); + pendingTokenMetadataCache.set(key, deferred.promise); + } + + try { + const contracts = uniqueAddresses.flatMap((address) => [ + { + abi: erc20Abi, + address: address as Address, + functionName: 'symbol' as const, + }, + { + abi: erc20Abi, + address: address as Address, + functionName: 'name' as const, + }, + { + abi: erc20Abi, + address: address as Address, + functionName: 'decimals' as const, + }, + ]); + + const results = await client.multicall({ + allowFailure: true, + contracts, + }); + + for (const [index, address] of uniqueAddresses.entries()) { + const symbolResult = results[index 
* 3]; + const nameResult = results[index * 3 + 1]; + const decimalsResult = results[index * 3 + 2]; + + const tokenInfo = createFallbackTokenInfo(address, { + decimals: + decimalsResult?.status === 'success' && typeof decimalsResult.result === 'number' + ? decimalsResult.result + : DEFAULT_TOKEN_DECIMALS, + name: nameResult?.status === 'success' && typeof nameResult.result === 'string' ? nameResult.result : UNKNOWN_TOKEN_NAME, + symbol: symbolResult?.status === 'success' && typeof symbolResult.result === 'string' ? symbolResult.result : 'Unknown', + }); + + const key = infoToKey(address, chainId); + resolvedTokenMetadataCache.set(key, tokenInfo); + metadataMap.set(key, tokenInfo); + deferredByKey.get(key)?.resolve(tokenInfo); + } + } catch { + for (const address of uniqueAddresses) { + const key = infoToKey(address, chainId); + const tokenInfo = createFallbackTokenInfo(address); + resolvedTokenMetadataCache.set(key, tokenInfo); + metadataMap.set(key, tokenInfo); + deferredByKey.get(key)?.resolve(tokenInfo); + } + } finally { + for (const key of deferredByKey.keys()) { + pendingTokenMetadataCache.delete(key); + } + } + }), + ); + + if (pendingEntries.length > 0) { + const resolvedPendingEntries = await Promise.all( + pendingEntries.map(async ({ key, promise }) => ({ + key, + tokenInfo: await promise, + })), + ); + + for (const pendingEntry of resolvedPendingEntries) { + metadataMap.set(pendingEntry.key, pendingEntry.tokenInfo); + } + } + + return metadataMap; +}; diff --git a/src/data-sources/subgraph/market.ts b/src/data-sources/subgraph/market.ts index 5427cc2c..b21fda03 100644 --- a/src/data-sources/subgraph/market.ts +++ b/src/data-sources/subgraph/market.ts @@ -4,9 +4,8 @@ import { formatBalance } from '@/utils/balance'; import type { SupportedNetworks } from '@/utils/networks'; import type { SubgraphMarket, SubgraphMarketQueryResponse, SubgraphMarketsQueryResponse, SubgraphToken } from '@/utils/subgraph-types'; import { getSubgraphUrl } from 
'@/utils/subgraph-urls'; -import { blacklistTokens, type ERC20Token, findToken, type UnknownERC20Token, TokenPeg } from '@/utils/tokens'; -import { fetchMajorPrices, type MajorPrices } from '@/utils/majorPrices'; -import type { Market, MarketWarning } from '@/utils/types'; +import { blacklistTokens, findToken } from '@/utils/tokens'; +import type { Market, MarketUsdPriceSource, MarketWarning } from '@/utils/types'; import { UNRECOGNIZED_COLLATERAL, UNRECOGNIZED_LOAN } from '@/utils/warnings'; import { subgraphGraphqlFetcher } from './fetchers'; @@ -32,7 +31,6 @@ const safeParseInt = (value: string | null | undefined): number => { const transformSubgraphMarketToMarket = ( subgraphMarket: Partial, network: SupportedNetworks, - majorPrices: MajorPrices, ): Market => { const marketId = subgraphMarket.id ?? ''; const lltv = subgraphMarket.lltv ?? '0'; @@ -43,17 +41,6 @@ const transformSubgraphMarketToMarket = ( const totalBorrowShares = subgraphMarket.totalBorrowShares ?? '0'; const fee = subgraphMarket.fee ?? '0'; - const getEstimateValue = (token: ERC20Token | UnknownERC20Token): number | undefined => { - if (!('peg' in token) || token.peg === undefined) { - return undefined; - } - const peg = token.peg as TokenPeg; - if (peg === TokenPeg.USD) { - return 1; - } - return majorPrices[peg]; - }; - const mapToken = (token: Partial | undefined) => ({ id: token?.id ?? '0x', address: token?.id ?? '0x', @@ -84,7 +71,8 @@ const transformSubgraphMarketToMarket = ( let loanAssetPrice = safeParseFloat(subgraphMarket.borrowedToken?.lastPriceUSD ?? '0'); let collateralAssetPrice = safeParseFloat(subgraphMarket.inputToken?.lastPriceUSD ?? '0'); - const hasUSDPrice = loanAssetPrice > 0 && collateralAssetPrice > 0; + const hasUSDPrice = loanAssetPrice > 0; + const usdPriceSource: MarketUsdPriceSource = hasUSDPrice ? 
'direct' : 'none'; const knownLoadAsset = findToken(loanAsset.address, network); const knownCollateralAsset = findToken(collateralAsset.address, network); @@ -96,16 +84,6 @@ const transformSubgraphMarketToMarket = ( warnings.push(UNRECOGNIZED_COLLATERAL); } - if (!hasUSDPrice) { - // no price available, try to estimate - if (knownLoadAsset) { - loanAssetPrice = getEstimateValue(knownLoadAsset) ?? 0; - } - if (knownCollateralAsset) { - collateralAssetPrice = getEstimateValue(knownCollateralAsset) ?? 0; - } - } - const supplyAssetsUsd = formatBalance(supplyAssets, loanAsset.decimals) * loanAssetPrice; const borrowAssetsUsd = formatBalance(borrowAssets, loanAsset.decimals) * loanAssetPrice; @@ -156,6 +134,7 @@ const transformSubgraphMarketToMarket = ( }, warnings, hasUSDPrice, + usdPriceSource, realizedBadDebt: { underlying: '0' }, supplyingVaults: [], }; @@ -185,9 +164,7 @@ export const fetchSubgraphMarket = async (uniqueKey: string, network: SupportedN return null; } - const majorPrices = await fetchMajorPrices(); - - return transformSubgraphMarketToMarket(marketData, network, majorPrices); + return transformSubgraphMarketToMarket(marketData, network); } catch (error) { console.error(`Error fetching subgraph market ${uniqueKey} on ${network}:`, error); return null; @@ -237,7 +214,6 @@ export const fetchSubgraphMarkets = async (network: SupportedNetworks): Promise< throw new Error(`Subgraph URL for network ${network} is not defined.`); } - const majorPricesPromise = fetchMajorPrices(); const allMarkets: SubgraphMarket[] = []; const firstPage = await fetchSubgraphMarketsPage(subgraphApiUrl, network, 0); @@ -272,6 +248,5 @@ export const fetchSubgraphMarkets = async (network: SupportedNetworks): Promise< } } - const majorPrices = await majorPricesPromise; - return allMarkets.map((market) => transformSubgraphMarketToMarket(market, network, majorPrices)); + return allMarkets.map((market) => transformSubgraphMarketToMarket(market, network)); }; diff --git 
a/src/data-sources/user-position.ts b/src/data-sources/user-position.ts new file mode 100644 index 00000000..6f4f582e --- /dev/null +++ b/src/data-sources/user-position.ts @@ -0,0 +1,38 @@ +import { hasEnvioIndexer, supportsMorphoApi } from '@/config/dataSources'; +import { fetchEnvioUserPositionForMarket } from '@/data-sources/envio/positions'; +import { fetchMorphoUserPositionForMarket } from '@/data-sources/morpho-api/positions'; +import { fetchSubgraphUserPositionForMarket } from '@/data-sources/subgraph/positions'; +import type { SupportedNetworks } from '@/utils/networks'; +import type { MarketPosition } from '@/utils/types'; + +export const fetchUserPositionForMarket = async ( + marketUniqueKey: string, + userAddress: string, + chainId: SupportedNetworks, +): Promise => { + if (hasEnvioIndexer()) { + try { + const envioPosition = await fetchEnvioUserPositionForMarket(marketUniqueKey, userAddress, chainId); + + if (envioPosition) { + return envioPosition; + } + } catch (envioError) { + console.error('Failed to fetch position via Envio:', envioError); + } + } + + if (supportsMorphoApi(chainId)) { + try { + const morphoPosition = await fetchMorphoUserPositionForMarket(marketUniqueKey, userAddress, chainId); + + if (morphoPosition) { + return morphoPosition; + } + } catch (morphoError) { + console.error('Failed to fetch position via Morpho API:', morphoError); + } + } + + return fetchSubgraphUserPositionForMarket(marketUniqueKey, userAddress, chainId); +}; diff --git a/src/features/market-detail/components/charts/volume-chart.tsx b/src/features/market-detail/components/charts/volume-chart.tsx index 9f17d9f0..2bdca027 100644 --- a/src/features/market-detail/components/charts/volume-chart.tsx +++ b/src/features/market-detail/components/charts/volume-chart.tsx @@ -65,12 +65,32 @@ function VolumeChart({ marketId, chainId, market }: VolumeChartProps) { return formatReadable(value); }; + const convertAssetValue = (raw: number | bigint | null): number => { + const 
value = raw ?? 0; + + if (typeof value === 'bigint') { + return Number(formatUnits(value, market.loanAsset.decimals)); + } + + if (!Number.isFinite(value)) { + return 0; + } + + // Historical asset series should be raw smallest-unit integers. + // Be tolerant of cached decimal display-unit points during source transitions. + if (!Number.isInteger(value)) { + return value; + } + + return Number(formatUnits(BigInt(value), market.loanAsset.decimals)); + }; + const convertValue = (raw: number | bigint | null): number => { const value = raw ?? 0; if (volumeView === 'USD') { return Number(value); } - return Number(formatUnits(BigInt(value), market.loanAsset.decimals)); + return convertAssetValue(value); }; const chartData = useMemo(() => { @@ -159,7 +179,7 @@ function VolumeChart({ marketId, chainId, market }: VolumeChartProps) { if (validAssetData.length === 0) return { current, netChangePercentage: 0, average: 0 }; // Net change percentage: compare asset-to-asset for consistent units - const startAsset = Number(formatUnits(BigInt(validAssetData[0].y ?? 0), market.loanAsset.decimals)); + const startAsset = convertAssetValue(validAssetData[0].y ?? 0); const netChangePercentage = startAsset !== 0 ? ((current - startAsset) / startAsset) * 100 : 0; // Average: use selected view data (USD or Asset) for display diff --git a/src/features/markets/components/table/market-row-detail.tsx b/src/features/markets/components/table/market-row-detail.tsx index ab62053a..0ecf2437 100644 --- a/src/features/markets/components/table/market-row-detail.tsx +++ b/src/features/markets/components/table/market-row-detail.tsx @@ -32,7 +32,7 @@ export function ExpandedMarketDetail({ market }: { market: Market }) {

Available Liquidity

- + {formatReadable(Number(market.state.liquidityAssetsUsd))}

diff --git a/src/features/markets/components/table/market-table-body.tsx b/src/features/markets/components/table/market-table-body.tsx index 18b33647..89c20e53 100644 --- a/src/features/markets/components/table/market-table-body.tsx +++ b/src/features/markets/components/table/market-table-body.tsx @@ -177,7 +177,7 @@ export function MarketTableBody({ currentEntries, expandedRowId, setExpandedRowI assets={item.state.supplyAssets} decimals={item.loanAsset.decimals} symbol={item.loanAsset.symbol} - isEstimated={!item.hasUSDPrice} + isEstimated={item.usdPriceSource === 'peg'} /> )} {columnVisibility.totalBorrow && ( @@ -187,7 +187,7 @@ export function MarketTableBody({ currentEntries, expandedRowId, setExpandedRowI assets={item.state.borrowAssets} decimals={item.loanAsset.decimals} symbol={item.loanAsset.symbol} - isEstimated={!item.hasUSDPrice} + isEstimated={item.usdPriceSource === 'peg'} /> )} {columnVisibility.liquidity && ( @@ -197,7 +197,7 @@ export function MarketTableBody({ currentEntries, expandedRowId, setExpandedRowI assets={item.state.liquidityAssets} decimals={item.loanAsset.decimals} symbol={item.loanAsset.symbol} - isEstimated={!item.hasUSDPrice} + isEstimated={item.usdPriceSource === 'peg'} /> )} {columnVisibility.supplyAPY && ( diff --git a/src/features/ui-lab/fixtures/market-fixtures.ts b/src/features/ui-lab/fixtures/market-fixtures.ts index bcff07f2..53cd4187 100644 --- a/src/features/ui-lab/fixtures/market-fixtures.ts +++ b/src/features/ui-lab/fixtures/market-fixtures.ts @@ -86,6 +86,7 @@ export const createUiLabMarketFixture = (): Market => ({ }, supplyingVaults: [], hasUSDPrice: true, + usdPriceSource: 'direct', warnings: [], }); diff --git a/src/graphql/envio-queries.ts b/src/graphql/envio-queries.ts new file mode 100644 index 00000000..222fdfbe --- /dev/null +++ b/src/graphql/envio-queries.ts @@ -0,0 +1,218 @@ +export const envioMarketsQuery = ` + query EnvioMarkets($limit: Int!, $offset: Int!, $where: Market_bool_exp) { + Market(limit: $limit, 
offset: $offset, where: $where, order_by: [{ chainId: asc }, { marketId: asc }]) { + chainId + marketId + loanToken + collateralToken + oracle + irm + lltv + fee + lastUpdate + rateAtTarget + totalSupplyAssets + totalSupplyShares + totalBorrowAssets + totalBorrowShares + } + } +`; + +export const envioMarketQuery = ` + query EnvioMarket($chainId: Int!, $marketId: String!) { + Market( + limit: 1 + where: { + chainId: { _eq: $chainId } + marketId: { _eq: $marketId } + } + ) { + chainId + marketId + loanToken + collateralToken + oracle + irm + lltv + fee + lastUpdate + rateAtTarget + totalSupplyAssets + totalSupplyShares + totalBorrowAssets + totalBorrowShares + } + } +`; + +export const envioPositionsQuery = ` + query EnvioPositions($limit: Int!, $offset: Int!, $where: Position_bool_exp) { + Position(limit: $limit, offset: $offset, where: $where, order_by: [{ chainId: asc }, { marketId: asc }]) { + chainId + marketId + supplyShares + borrowShares + collateral + user + } + } +`; + +export const envioPositionForMarketQuery = ` + query EnvioPositionForMarket($chainId: Int!, $marketId: String!, $user: String!) 
{ + Position( + limit: 1 + where: { + chainId: { _eq: $chainId } + marketId: { _eq: $marketId } + user: { _eq: $user } + } + ) { + chainId + marketId + supplyShares + borrowShares + collateral + user + } + } +`; + +export const envioMarketSuppliersQuery = ` + query EnvioMarketSuppliers($limit: Int!, $offset: Int!, $where: Position_bool_exp) { + Position(limit: $limit, offset: $offset, where: $where, order_by: [{ supplyShares: desc }, { user: asc }]) { + chainId + marketId + supplyShares + user + } + } +`; + +export const envioMarketBorrowersQuery = ` + query EnvioMarketBorrowers($limit: Int!, $offset: Int!, $where: Position_bool_exp) { + Position(limit: $limit, offset: $offset, where: $where, order_by: [{ borrowShares: desc }, { user: asc }]) { + chainId + marketId + borrowShares + collateral + user + } + } +`; + +export const envioSupplyEventsQuery = ` + query EnvioSupplyEvents($limit: Int!, $offset: Int!, $where: Morpho_Supply_bool_exp) { + Morpho_Supply(limit: $limit, offset: $offset, where: $where, order_by: [{ timestamp: desc }, { id: desc }]) { + assets + chainId + market_id + onBehalf + shares + timestamp + txHash + } + } +`; + +export const envioWithdrawEventsQuery = ` + query EnvioWithdrawEvents($limit: Int!, $offset: Int!, $where: Morpho_Withdraw_bool_exp) { + Morpho_Withdraw(limit: $limit, offset: $offset, where: $where, order_by: [{ timestamp: desc }, { id: desc }]) { + assets + chainId + market_id + onBehalf + receiver + shares + timestamp + txHash + } + } +`; + +export const envioBorrowEventsQuery = ` + query EnvioBorrowEvents($limit: Int!, $offset: Int!, $where: Morpho_Borrow_bool_exp) { + Morpho_Borrow(limit: $limit, offset: $offset, where: $where, order_by: [{ timestamp: desc }, { id: desc }]) { + assets + chainId + market_id + onBehalf + receiver + shares + timestamp + txHash + } + } +`; + +export const envioRepayEventsQuery = ` + query EnvioRepayEvents($limit: Int!, $offset: Int!, $where: Morpho_Repay_bool_exp) { + Morpho_Repay(limit: $limit, 
offset: $offset, where: $where, order_by: [{ timestamp: desc }, { id: desc }]) { + assets + chainId + market_id + onBehalf + shares + timestamp + txHash + } + } +`; + +export const envioSupplyCollateralEventsQuery = ` + query EnvioSupplyCollateralEvents($limit: Int!, $offset: Int!, $where: Morpho_SupplyCollateral_bool_exp) { + Morpho_SupplyCollateral(limit: $limit, offset: $offset, where: $where, order_by: [{ timestamp: desc }, { id: desc }]) { + assets + chainId + market_id + onBehalf + timestamp + txHash + } + } +`; + +export const envioWithdrawCollateralEventsQuery = ` + query EnvioWithdrawCollateralEvents($limit: Int!, $offset: Int!, $where: Morpho_WithdrawCollateral_bool_exp) { + Morpho_WithdrawCollateral(limit: $limit, offset: $offset, where: $where, order_by: [{ timestamp: desc }, { id: desc }]) { + assets + chainId + market_id + onBehalf + receiver + timestamp + txHash + } + } +`; + +export const envioLiquidationsQuery = ` + query EnvioLiquidations($limit: Int!, $offset: Int!, $where: Morpho_Liquidate_bool_exp) { + Morpho_Liquidate(limit: $limit, offset: $offset, where: $where, order_by: [{ timestamp: desc }, { id: desc }]) { + badDebtAssets + borrower + caller + chainId + market_id + repaidAssets + repaidShares + seizedAssets + timestamp + txHash + } + } +`; + +export const envioBorrowRateUpdatesQuery = ` + query EnvioBorrowRateUpdates($limit: Int!, $offset: Int!, $where: AdaptiveCurveIrm_BorrowRateUpdate_bool_exp) { + AdaptiveCurveIrm_BorrowRateUpdate(limit: $limit, offset: $offset, where: $where, order_by: [{ timestamp: asc }, { id: asc }]) { + avgBorrowRate + chainId + market_id + rateAtTarget + timestamp + txHash + } + } +`; diff --git a/src/hooks/queries/fetchUserTransactions.ts b/src/hooks/queries/fetchUserTransactions.ts index d506a9d5..3c7e84b0 100644 --- a/src/hooks/queries/fetchUserTransactions.ts +++ b/src/hooks/queries/fetchUserTransactions.ts @@ -1,4 +1,5 @@ -import { supportsMorphoApi } from '@/config/dataSources'; +import { hasEnvioIndexer, 
supportsMorphoApi } from '@/config/dataSources'; +import { fetchEnvioTransactions } from '@/data-sources/envio/transactions'; import { fetchMorphoTransactions } from '@/data-sources/morpho-api/transactions'; import { fetchSubgraphTransactions } from '@/data-sources/subgraph/transactions'; import { isSupportedChain } from '@/utils/networks'; @@ -50,7 +51,7 @@ export async function fetchUserTransactions(filters: TransactionFilters): Promis } // Check subgraph user address limitation - if (!supportsMorphoApi(chainId) && filters.userAddress.length !== 1) { + if (!hasEnvioIndexer() && !supportsMorphoApi(chainId) && filters.userAddress.length !== 1) { const errorMsg = 'Subgraph data source requires exactly one user address.'; console.error(errorMsg); return { @@ -60,7 +61,18 @@ export async function fetchUserTransactions(filters: TransactionFilters): Promis }; } - // Try Morpho API first if supported + if (hasEnvioIndexer()) { + try { + const response = await fetchEnvioTransactions(filters); + if (!response.error) { + return response; + } + } catch (envioError) { + console.warn(`Envio failed for chain ${chainId}, falling back to legacy sources:`, envioError); + } + } + + // Try Morpho API next if supported if (supportsMorphoApi(chainId)) { try { const response = await fetchMorphoTransactions(filters); @@ -74,7 +86,7 @@ export async function fetchUserTransactions(filters: TransactionFilters): Promis } } - // Fallback to Subgraph + // Final fallback to Subgraph try { return await fetchSubgraphTransactions(filters, chainId); } catch (subgraphError) { diff --git a/src/hooks/queries/useMarketMetricsQuery.ts b/src/hooks/queries/useMarketMetricsQuery.ts index d9aa8248..cf5a605b 100644 --- a/src/hooks/queries/useMarketMetricsQuery.ts +++ b/src/hooks/queries/useMarketMetricsQuery.ts @@ -170,7 +170,6 @@ export const useMarketMetricsMap = (params: MarketMetricsParams = {}) => { const key = getMetricsKey(market.chainId, market.marketUniqueKey); map.set(key, market); } - 
console.log('[Metrics] Loaded', map.size, 'of', data.total, 'markets'); return map; }, [data?.markets, data?.total]); diff --git a/src/hooks/queries/useMarketsQuery.ts b/src/hooks/queries/useMarketsQuery.ts index 81e348e1..b761c5f2 100644 --- a/src/hooks/queries/useMarketsQuery.ts +++ b/src/hooks/queries/useMarketsQuery.ts @@ -1,22 +1,16 @@ import { useQuery } from '@tanstack/react-query'; -import { supportsMorphoApi } from '@/config/dataSources'; -import { fetchMorphoMarkets } from '@/data-sources/morpho-api/market'; -import { fetchSubgraphMarkets } from '@/data-sources/subgraph/market'; +import { useCustomRpcContext } from '@/components/providers/CustomRpcProvider'; +import { fetchMarketCatalog } from '@/data-sources/market-catalog'; import { ALL_SUPPORTED_NETWORKS, isSupportedChain } from '@/utils/networks'; import type { Market } from '@/utils/types'; -const toError = (error: unknown): Error => { - if (error instanceof Error) return error; - return new Error(String(error)); -}; - /** * Fetches markets from all supported networks using React Query. * * Data fetching strategy: - * - Tries Morpho API first (if supported) - * - Falls back to Subgraph if API fails - * - Combines markets from all networks + * - Uses the shared market catalog adapter to fetch all supported chains in one go + * - Merges Morpho API metadata with Envio state when Envio is configured + * - Falls back to per-network Morpho/Subgraph fetching only if the cross-chain path fails * - Applies basic filtering (required fields, supported chains) * * Cache behavior: @@ -30,47 +24,13 @@ const toError = (error: unknown): Error => { * ``` */ export const useMarketsQuery = () => { + const { customRpcUrls, rpcConfigVersion } = useCustomRpcContext(); + return useQuery({ - queryKey: ['markets'], + queryKey: ['markets', rpcConfigVersion], queryFn: async () => { - const combinedMarkets: Market[] = []; - const fetchErrors: Error[] = []; - - // Fetch markets for each network based on its data source. 
- // Use allSettled so a single chain failure cannot reject the whole query. - const results = await Promise.allSettled( - ALL_SUPPORTED_NETWORKS.map(async (network) => { - let networkMarkets: Market[] = []; - let trySubgraph = !supportsMorphoApi(network); - - // Try Morpho API first if supported - if (!trySubgraph) { - try { - networkMarkets = await fetchMorphoMarkets(network); - } catch { - trySubgraph = true; - // Continue to Subgraph fallback - } - } - - // If Morpho API failed or not supported, try Subgraph - if (trySubgraph) { - networkMarkets = await fetchSubgraphMarkets(network); - } - - return networkMarkets; - }), - ); - - results.forEach((result, index) => { - if (result.status === 'fulfilled') { - combinedMarkets.push(...result.value); - } else { - const network = ALL_SUPPORTED_NETWORKS[index]; - const error = toError(result.reason); - console.error(`Failed to fetch markets for network ${network}:`, error); - fetchErrors.push(error); - } + const combinedMarkets = await fetchMarketCatalog(ALL_SUPPORTED_NETWORKS, { + customRpcUrls, }); // Apply basic filtering @@ -79,11 +39,6 @@ export const useMarketsQuery = () => { .filter((market) => market.loanAsset && market.collateralAsset) .filter((market) => isSupportedChain(market.morphoBlue.chain.id)); - // If everything failed, surface an error so the UI can react. 
- if (filtered.length === 0 && fetchErrors.length > 0) { - throw fetchErrors[0]; - } - return filtered; }, staleTime: 5 * 60 * 1000, // Data is fresh for 5 minutes diff --git a/src/hooks/queries/useTokensQuery.ts b/src/hooks/queries/useTokensQuery.ts index f8e7ee36..d4b16f48 100644 --- a/src/hooks/queries/useTokensQuery.ts +++ b/src/hooks/queries/useTokensQuery.ts @@ -1,107 +1,25 @@ import { useQuery } from '@tanstack/react-query'; import { useCallback } from 'react'; -import { z } from 'zod'; -import { SupportedNetworks, getViemChain } from '@/utils/networks'; -import { supportedTokens } from '@/utils/tokens'; import type { ERC20Token } from '@/utils/tokens'; - -const PendleAssetSchema = z.object({ - address: z.string(), - chainId: z.number(), - symbol: z.string(), - decimals: z.number(), - proIcon: z.string().nullable(), -}); - -type PendleAsset = z.infer; - -const localTokensWithSource: ERC20Token[] = supportedTokens.map((token) => ({ - ...token, - source: 'local', -})); - -async function fetchPendleAssets(chainId: number): Promise { - try { - const response = await fetch(`https://api-v2.pendle.finance/core/v1/${chainId}/assets/all`); - if (!response.ok) return []; - const data = (await response.json()) as PendleAsset[]; - return z.array(PendleAssetSchema).parse(data); - } catch (error) { - console.error(`Error fetching Pendle assets for chain ${chainId}:`, error); - return []; - } -} - -function convertPendleAssetToToken(asset: PendleAsset, chainId: SupportedNetworks): ERC20Token { - return { - symbol: asset.symbol, - decimals: asset.decimals, - img: asset.proIcon ?? 
undefined, - networks: [ - { - chain: getViemChain(chainId), - address: asset.address, - }, - ], - isFactoryToken: true, - protocol: { - name: 'Pendle', - }, - source: 'external', - }; -} +import { fetchMergedTokenCatalog, findTokenInCatalog, getLocalTokenCatalog } from '@/utils/tokenCatalog'; // Fetches tokens from Pendle API and merges with local tokens export const useTokensQuery = () => { const query = useQuery({ queryKey: ['tokens'], queryFn: async () => { - try { - const [mainnetAssets, baseAssets, arbitrumAssets, hyperevmAssets] = await Promise.all([ - fetchPendleAssets(SupportedNetworks.Mainnet), - fetchPendleAssets(SupportedNetworks.Base), - fetchPendleAssets(SupportedNetworks.Arbitrum), - fetchPendleAssets(SupportedNetworks.HyperEVM), - ]); - - const pendleTokens = [ - ...mainnetAssets.map((a) => convertPendleAssetToToken(a, SupportedNetworks.Mainnet)), - ...baseAssets.map((a) => convertPendleAssetToToken(a, SupportedNetworks.Base)), - ...arbitrumAssets.map((a) => convertPendleAssetToToken(a, SupportedNetworks.Arbitrum)), - ...hyperevmAssets.map((a) => convertPendleAssetToToken(a, SupportedNetworks.HyperEVM)), - ]; - - const filteredPendleTokens = pendleTokens.filter((pendleToken) => { - return !pendleToken.networks.some((pendleNetwork) => - supportedTokens.some((supportedToken) => - supportedToken.networks.some( - (supportedNetwork) => - supportedNetwork.address.toLowerCase() === pendleNetwork.address.toLowerCase() && - supportedNetwork.chain.id === pendleNetwork.chain.id, - ), - ), - ); - }); - - return [...localTokensWithSource, ...filteredPendleTokens]; - } catch (err) { - console.error('Error fetching Pendle assets:', err); - throw err; - } + return fetchMergedTokenCatalog(); }, staleTime: 5 * 60 * 1000, refetchInterval: 5 * 60 * 1000, refetchOnWindowFocus: true, }); - const allTokens = query.data ?? localTokensWithSource; + const allTokens: ERC20Token[] = query.data ?? 
getLocalTokenCatalog(); const findToken = useCallback( (address: string, chainId: number) => { - if (!address || !chainId) return undefined; - return allTokens.find((token) => - token.networks.some((network) => network.address?.toLowerCase() === address.toLowerCase() && network.chain.id === chainId), - ); + return findTokenInCatalog(allTokens, address, chainId); }, [allTokens], ); diff --git a/src/hooks/useAllMarketPositions.ts b/src/hooks/useAllMarketPositions.ts index ebf487f6..9be8df7a 100644 --- a/src/hooks/useAllMarketPositions.ts +++ b/src/hooks/useAllMarketPositions.ts @@ -1,9 +1,5 @@ import { useQuery } from '@tanstack/react-query'; -import { supportsMorphoApi } from '@/config/dataSources'; -import { fetchMorphoMarketBorrowers } from '@/data-sources/morpho-api/market-borrowers'; -import { fetchMorphoMarketSuppliers } from '@/data-sources/morpho-api/market-suppliers'; -import { fetchSubgraphMarketBorrowers } from '@/data-sources/subgraph/market-borrowers'; -import { fetchSubgraphMarketSuppliers } from '@/data-sources/subgraph/market-suppliers'; +import { fetchMarketBorrowers, fetchMarketSuppliers } from '@/data-sources/market-participants'; import type { SupportedNetworks } from '@/utils/networks'; import type { MarketBorrower, MarketSupplier } from '@/utils/types'; @@ -33,23 +29,7 @@ export const useAllMarketBorrowers = (marketId: string | undefined, network: Sup queryFn: async () => { if (!marketId || !network) return null; - let result = null; - - // Try Morpho API first - if (supportsMorphoApi(network)) { - try { - result = await fetchMorphoMarketBorrowers(marketId, Number(network), '1', TOP_POSITIONS_LIMIT, 0); - } catch { - // Morpho API failed, will fall back to subgraph - } - } - - // Fallback to Subgraph if Morpho API failed or returned empty - if (!result || result.items?.length === 0) { - result = await fetchSubgraphMarketBorrowers(marketId, network, '1', TOP_POSITIONS_LIMIT, 0); - } - - return result; + return fetchMarketBorrowers(marketId, 
network, '1', TOP_POSITIONS_LIMIT, 0); }, enabled: !!marketId && !!network, staleTime: 1000 * 60 * 2, // 2 minutes @@ -73,23 +53,7 @@ export const useAllMarketSuppliers = (marketId: string | undefined, network: Sup queryFn: async () => { if (!marketId || !network) return null; - let result = null; - - // Try Morpho API first - if (supportsMorphoApi(network)) { - try { - result = await fetchMorphoMarketSuppliers(marketId, Number(network), '1', TOP_POSITIONS_LIMIT, 0); - } catch { - // Morpho API failed, will fall back to subgraph - } - } - - // Fallback to Subgraph if Morpho API failed or returned empty - if (!result || result.items?.length === 0) { - result = await fetchSubgraphMarketSuppliers(marketId, network, '1', TOP_POSITIONS_LIMIT, 0); - } - - return result; + return fetchMarketSuppliers(marketId, network, '1', TOP_POSITIONS_LIMIT, 0); }, enabled: !!marketId && !!network, staleTime: 1000 * 60 * 2, // 2 minutes diff --git a/src/hooks/useMarketBorrowers.ts b/src/hooks/useMarketBorrowers.ts index 171006a7..d5a234ec 100644 --- a/src/hooks/useMarketBorrowers.ts +++ b/src/hooks/useMarketBorrowers.ts @@ -1,8 +1,6 @@ import { useCallback, useEffect } from 'react'; import { useQuery, useQueryClient } from '@tanstack/react-query'; -import { supportsMorphoApi } from '@/config/dataSources'; -import { fetchMorphoMarketBorrowers } from '@/data-sources/morpho-api/market-borrowers'; -import { fetchSubgraphMarketBorrowers } from '@/data-sources/subgraph/market-borrowers'; +import { fetchMarketBorrowers } from '@/data-sources/market-participants'; import type { SupportedNetworks } from '@/utils/networks'; import type { PaginatedMarketBorrowers } from '@/utils/types'; @@ -40,30 +38,7 @@ export const useMarketBorrowers = ( } const targetSkip = (targetPage - 1) * pageSize; - let result: PaginatedMarketBorrowers | null = null; - - // Try Morpho API first if supported - if (supportsMorphoApi(network)) { - try { - console.log(`Attempting to fetch borrowers via Morpho API for 
${marketId} (page ${targetPage})`); - result = await fetchMorphoMarketBorrowers(marketId, Number(network), effectiveMinShares, pageSize, targetSkip); - } catch (morphoError) { - console.error('Failed to fetch borrowers via Morpho API:', morphoError); - } - } - - // Fallback to Subgraph if Morpho API failed or not supported - if (!result) { - try { - console.log(`Attempting to fetch borrowers via Subgraph for ${marketId} (page ${targetPage})`); - result = await fetchSubgraphMarketBorrowers(marketId, network, effectiveMinShares, pageSize, targetSkip); - } catch (subgraphError) { - console.error('Failed to fetch borrowers via Subgraph:', subgraphError); - throw subgraphError; - } - } - - return result; + return fetchMarketBorrowers(marketId, network, effectiveMinShares, pageSize, targetSkip); }, [marketId, network, effectiveMinShares, pageSize], ); diff --git a/src/hooks/useMarketBorrows.ts b/src/hooks/useMarketBorrows.ts index f5fac444..9a08e33f 100644 --- a/src/hooks/useMarketBorrows.ts +++ b/src/hooks/useMarketBorrows.ts @@ -1,8 +1,6 @@ import { useEffect, useCallback } from 'react'; import { useQuery, useQueryClient } from '@tanstack/react-query'; -import { supportsMorphoApi } from '@/config/dataSources'; -import { fetchMorphoMarketBorrows } from '@/data-sources/morpho-api/market-borrows'; -import { fetchSubgraphMarketBorrows } from '@/data-sources/subgraph/market-borrows'; +import { fetchMarketBorrows } from '@/data-sources/market-activity'; import type { SupportedNetworks } from '@/utils/networks'; import type { PaginatedMarketActivityTransactions } from '@/utils/types'; @@ -37,30 +35,7 @@ export const useMarketBorrows = ( } const targetSkip = (targetPage - 1) * pageSize; - let result: PaginatedMarketActivityTransactions | null = null; - - // Try Morpho API first if supported - if (supportsMorphoApi(network)) { - try { - console.log(`Attempting to fetch borrows via Morpho API for ${marketId} (page ${targetPage})`); - result = await 
fetchMorphoMarketBorrows(marketId, minAssets, pageSize, targetSkip); - } catch (morphoError) { - console.error('Failed to fetch borrows via Morpho API:', morphoError); - } - } - - // Fallback to Subgraph if Morpho API failed or not supported - if (!result) { - try { - console.log(`Attempting to fetch borrows via Subgraph for ${marketId} (page ${targetPage})`); - result = await fetchSubgraphMarketBorrows(marketId, loanAssetId, network, minAssets, pageSize, targetSkip); - } catch (subgraphError) { - console.error('Failed to fetch borrows via Subgraph:', subgraphError); - throw subgraphError; - } - } - - return result; + return fetchMarketBorrows(marketId, loanAssetId, network, minAssets, pageSize, targetSkip); }, [marketId, loanAssetId, network, minAssets, pageSize], ); diff --git a/src/hooks/useMarketData.ts b/src/hooks/useMarketData.ts index fd6f10ca..9fce5ba0 100644 --- a/src/hooks/useMarketData.ts +++ b/src/hooks/useMarketData.ts @@ -1,24 +1,22 @@ import { useMemo } from 'react'; import { useQuery } from '@tanstack/react-query'; import { usePublicClient } from 'wagmi'; -import { supportsMorphoApi } from '@/config/dataSources'; +import { useCustomRpcContext } from '@/components/providers/CustomRpcProvider'; +import { fetchMarketDetails } from '@/data-sources/market-details'; import { useOracleDataQuery } from '@/hooks/queries/useOracleDataQuery'; -import { fetchMorphoMarket } from '@/data-sources/morpho-api/market'; -import { fetchSubgraphMarket } from '@/data-sources/subgraph/market'; import type { SupportedNetworks } from '@/utils/networks'; import { fetchMarketSnapshot } from '@/utils/positions'; import type { Market } from '@/utils/types'; export const useMarketData = (uniqueKey: string | undefined, network: SupportedNetworks | undefined) => { - const queryKey = ['marketData', uniqueKey, network]; + const { customRpcUrls, rpcConfigVersion } = useCustomRpcContext(); + const queryKey = ['marketData', uniqueKey, network, rpcConfigVersion]; const publicClient = 
usePublicClient({ chainId: network }); const { getOracleData } = useOracleDataQuery(); const { data, isLoading, error, refetch } = useQuery({ queryKey: queryKey, queryFn: async (): Promise => { - console.log('fetching market'); - if (!uniqueKey || !network) { return null; } @@ -29,44 +27,22 @@ export const useMarketData = (uniqueKey: string | undefined, network: SupportedN } // 1. Try fetching the on-chain market snapshot first - console.log(`Attempting fetchMarketSnapshot for market ${uniqueKey}`); let snapshot = null; try { snapshot = await fetchMarketSnapshot(uniqueKey, network, publicClient); - console.log(`Market state (from RPC) result for ${uniqueKey}:`, snapshot ? 'Exists' : 'Null'); } catch (snapshotError) { console.error(`Error fetching market snapshot for ${uniqueKey}:`, snapshotError); // Snapshot fetch failed, will proceed to fallback fetch } - let finalMarket: Market | null = null; - - // 2. Try Morpho API first if supported, then fallback to Subgraph - try { - if (supportsMorphoApi(network)) { - console.log(`Attempting to fetch market data via Morpho API for ${uniqueKey}`); - finalMarket = await fetchMorphoMarket(uniqueKey, network); - } - } catch (morphoError) { - console.error('Failed to fetch market data via Morpho API:', morphoError); - // Continue to Subgraph fallback - } - - // 3. If Morpho API failed or not supported, try Subgraph - if (!finalMarket) { - try { - console.log(`Attempting to fetch market data via Subgraph for ${uniqueKey}`); - finalMarket = await fetchSubgraphMarket(uniqueKey, network); - } catch (subgraphError) { - console.error('Failed to fetch market data via Subgraph:', subgraphError); - finalMarket = null; - } - } + const finalMarket = await fetchMarketDetails(uniqueKey, network, { + customRpcUrls, + enrichHistoricalApys: true, + }); - // 4. If we have both snapshot and market data, override the state fields with snapshot + // 3. 
If we have both snapshot and market data, override the state fields with snapshot if (snapshot && finalMarket) { - console.log(`Found market snapshot for ${uniqueKey}, overriding state with on-chain data.`); - finalMarket = { + return { ...finalMarket, state: { ...finalMarket.state, @@ -78,17 +54,19 @@ export const useMarketData = (uniqueKey: string | undefined, network: SupportedN liquidityAssets: snapshot.liquidityAssets, }, }; - } else if (!finalMarket) { + } + + if (!finalMarket) { // Both data sources failed - console.error(`Failed to fetch market data for ${uniqueKey} via both Morpho API and Subgraph.`); - finalMarket = null; - } else if (!snapshot) { + console.error(`Failed to fetch market data for ${uniqueKey} via Envio and fallback sources.`); + return null; + } + + if (!snapshot) { // Snapshot failed but data source succeeded - just use data source console.warn(`Market snapshot failed for ${uniqueKey}, using data source only.`); } - console.log(`Final market data for ${uniqueKey}:`, finalMarket ? 
'Found' : 'Not Found'); - return finalMarket; }, enabled: !!uniqueKey && !!network, diff --git a/src/hooks/useMarketHistoricalData.ts b/src/hooks/useMarketHistoricalData.ts index 4929aa48..81a24c96 100644 --- a/src/hooks/useMarketHistoricalData.ts +++ b/src/hooks/useMarketHistoricalData.ts @@ -1,7 +1,7 @@ import { useQuery } from '@tanstack/react-query'; -import { supportsMorphoApi } from '@/config/dataSources'; -import { fetchMorphoMarketHistoricalData, type HistoricalDataSuccessResult } from '@/data-sources/morpho-api/historical'; -import { fetchSubgraphMarketHistoricalData } from '@/data-sources/subgraph/historical'; +import { useCustomRpcContext } from '@/components/providers/CustomRpcProvider'; +import { fetchMarketHistoricalData } from '@/data-sources/market-historical'; +import type { HistoricalDataSuccessResult } from '@/data-sources/morpho-api/historical'; import type { SupportedNetworks } from '@/utils/networks'; import type { TimeseriesOptions } from '@/utils/types'; @@ -10,45 +10,19 @@ export const useMarketHistoricalData = ( network: SupportedNetworks | undefined, options: TimeseriesOptions | undefined, ) => { - const queryKey = ['marketHistoricalData', uniqueKey, network, options?.startTimestamp, options?.endTimestamp, options?.interval]; + const { customRpcUrls, rpcConfigVersion } = useCustomRpcContext(); + const queryKey = ['marketHistoricalData', uniqueKey, network, options?.startTimestamp, options?.endTimestamp, options?.interval, rpcConfigVersion]; const { data, isLoading, error, refetch } = useQuery({ queryKey: queryKey, queryFn: async (): Promise => { if (!uniqueKey || !network || !options) { - console.log('Historical data prerequisites not met.', { - uniqueKey, - network, - options, - }); return null; } - let historicalData: HistoricalDataSuccessResult | null = null; - - // Try Morpho API first if supported - if (supportsMorphoApi(network)) { - try { - console.log(`Attempting to fetch historical data via Morpho API for ${uniqueKey}`); - 
historicalData = await fetchMorphoMarketHistoricalData(uniqueKey, network, options); - } catch (morphoError) { - console.error('Failed to fetch historical data via Morpho API:', morphoError); - // Continue to Subgraph fallback - } - } - - // If Morpho API failed or not supported, try Subgraph - if (!historicalData) { - try { - console.log(`Attempting to fetch historical data via Subgraph for ${uniqueKey}`); - historicalData = await fetchSubgraphMarketHistoricalData(uniqueKey, network, options); - } catch (subgraphError) { - console.error('Failed to fetch historical data via Subgraph:', subgraphError); - historicalData = null; - } - } - - return historicalData; + return fetchMarketHistoricalData(uniqueKey, network, options, { + customRpcUrls, + }); }, enabled: !!uniqueKey && !!network && !!options, staleTime: 1000 * 60 * 5, diff --git a/src/hooks/useMarketLiquidations.ts b/src/hooks/useMarketLiquidations.ts index d15b5b3a..7bd019a1 100644 --- a/src/hooks/useMarketLiquidations.ts +++ b/src/hooks/useMarketLiquidations.ts @@ -1,7 +1,5 @@ import { useQuery } from '@tanstack/react-query'; -import { supportsMorphoApi } from '@/config/dataSources'; -import { fetchMorphoMarketLiquidations } from '@/data-sources/morpho-api/market-liquidations'; -import { fetchSubgraphMarketLiquidations } from '@/data-sources/subgraph/market-liquidations'; +import { fetchMarketLiquidations } from '@/data-sources/market-activity'; import type { SupportedNetworks } from '@/utils/networks'; import type { MarketLiquidationTransaction } from '@/utils/types'; // Use simplified type @@ -22,31 +20,7 @@ export const useMarketLiquidations = (marketId: string | undefined, network: Sup return null; } - let liquidations: MarketLiquidationTransaction[] | null = null; - - // Try Morpho API first if supported - if (supportsMorphoApi(network)) { - try { - console.log(`Attempting to fetch liquidations via Morpho API for ${marketId}`); - liquidations = await fetchMorphoMarketLiquidations(marketId); - } catch 
(morphoError) { - console.error('Failed to fetch liquidations via Morpho API:', morphoError); - // Continue to Subgraph fallback - } - } - - // If Morpho API failed or not supported, try Subgraph - if (!liquidations) { - try { - console.log(`Attempting to fetch liquidations via Subgraph for ${marketId}`); - liquidations = await fetchSubgraphMarketLiquidations(marketId, network); - } catch (subgraphError) { - console.error('Failed to fetch liquidations via Subgraph:', subgraphError); - liquidations = null; - } - } - - return liquidations; + return fetchMarketLiquidations(marketId, network); }, enabled: !!marketId && !!network, staleTime: 1000 * 60 * 5, // 5 minutes, liquidations are less frequent diff --git a/src/hooks/useMarketSuppliers.ts b/src/hooks/useMarketSuppliers.ts index ccaa1700..33e67b14 100644 --- a/src/hooks/useMarketSuppliers.ts +++ b/src/hooks/useMarketSuppliers.ts @@ -1,8 +1,6 @@ import { useCallback, useEffect } from 'react'; import { useQuery, useQueryClient } from '@tanstack/react-query'; -import { supportsMorphoApi } from '@/config/dataSources'; -import { fetchMorphoMarketSuppliers } from '@/data-sources/morpho-api/market-suppliers'; -import { fetchSubgraphMarketSuppliers } from '@/data-sources/subgraph/market-suppliers'; +import { fetchMarketSuppliers } from '@/data-sources/market-participants'; import type { SupportedNetworks } from '@/utils/networks'; import type { PaginatedMarketSuppliers } from '@/utils/types'; @@ -40,30 +38,7 @@ export const useMarketSuppliers = ( } const targetSkip = (targetPage - 1) * pageSize; - let result: PaginatedMarketSuppliers | null = null; - - // Try Morpho API first if supported - if (supportsMorphoApi(network)) { - try { - console.log(`Attempting to fetch suppliers via Morpho API for ${marketId} (page ${targetPage})`); - result = await fetchMorphoMarketSuppliers(marketId, Number(network), effectiveMinShares, pageSize, targetSkip); - } catch (morphoError) { - console.error('Failed to fetch suppliers via Morpho 
API:', morphoError); - } - } - - // Fallback to Subgraph if Morpho API failed or not supported - if (!result) { - try { - console.log(`Attempting to fetch suppliers via Subgraph for ${marketId} (page ${targetPage})`); - result = await fetchSubgraphMarketSuppliers(marketId, network, effectiveMinShares, pageSize, targetSkip); - } catch (subgraphError) { - console.error('Failed to fetch suppliers via Subgraph:', subgraphError); - throw subgraphError; - } - } - - return result; + return fetchMarketSuppliers(marketId, network, effectiveMinShares, pageSize, targetSkip); }, [marketId, network, effectiveMinShares, pageSize], ); diff --git a/src/hooks/useMarketSupplies.ts b/src/hooks/useMarketSupplies.ts index 3da92603..57c2ae78 100644 --- a/src/hooks/useMarketSupplies.ts +++ b/src/hooks/useMarketSupplies.ts @@ -1,8 +1,6 @@ import { useEffect, useCallback } from 'react'; import { useQuery, useQueryClient } from '@tanstack/react-query'; -import { supportsMorphoApi } from '@/config/dataSources'; -import { fetchMorphoMarketSupplies } from '@/data-sources/morpho-api/market-supplies'; -import { fetchSubgraphMarketSupplies } from '@/data-sources/subgraph/market-supplies'; +import { fetchMarketSupplies } from '@/data-sources/market-activity'; import type { SupportedNetworks } from '@/utils/networks'; import type { PaginatedMarketActivityTransactions } from '@/utils/types'; @@ -37,30 +35,7 @@ export const useMarketSupplies = ( } const targetSkip = (targetPage - 1) * pageSize; - let result: PaginatedMarketActivityTransactions | null = null; - - // Try Morpho API first if supported - if (supportsMorphoApi(network)) { - try { - console.log(`Attempting to fetch supplies via Morpho API for ${marketId} (page ${targetPage})`); - result = await fetchMorphoMarketSupplies(marketId, minAssets, pageSize, targetSkip); - } catch (morphoError) { - console.error('Failed to fetch supplies via Morpho API:', morphoError); - } - } - - // Fallback to Subgraph if Morpho API failed or not supported - if 
(!result) { - try { - console.log(`Attempting to fetch supplies via Subgraph for ${marketId} (page ${targetPage})`); - result = await fetchSubgraphMarketSupplies(marketId, loanAssetId, network, minAssets, pageSize, targetSkip); - } catch (subgraphError) { - console.error('Failed to fetch supplies via Subgraph:', subgraphError); - throw subgraphError; - } - } - - return result; + return fetchMarketSupplies(marketId, loanAssetId, network, minAssets, pageSize, targetSkip); }, [marketId, loanAssetId, network, minAssets, pageSize], ); diff --git a/src/hooks/useProcessedMarkets.ts b/src/hooks/useProcessedMarkets.ts index 2093409c..6cd729ec 100644 --- a/src/hooks/useProcessedMarkets.ts +++ b/src/hooks/useProcessedMarkets.ts @@ -4,35 +4,10 @@ import { useOracleDataQuery } from '@/hooks/queries/useOracleDataQuery'; import { useTokenPrices } from '@/hooks/useTokenPrices'; import { useBlacklistedMarkets } from '@/stores/useBlacklistedMarkets'; import { useAppSettings } from '@/stores/useAppSettings'; +import { collectTokenPriceInputsForMarkets, applyTokenPriceResolutionToMarkets } from '@/data-sources/shared/market-usd'; import { isForceUnwhitelisted } from '@/utils/markets'; -import { getTokenPriceKey } from '@/data-sources/morpho-api/prices'; -import { formatBalance } from '@/utils/balance'; -import type { TokenPriceInput } from '@/data-sources/morpho-api/prices'; import type { Market } from '@/utils/types'; -const hasPositiveAssets = (value?: string): boolean => { - if (!value) return false; - try { - return BigInt(value) > 0n; - } catch { - return false; - } -}; - -const isFiniteNumber = (value: number | null | undefined): value is number => { - return value !== null && value !== undefined && Number.isFinite(value); -}; - -const shouldComputeUsd = (usdValue: number | null | undefined, assets?: string): boolean => { - if (!isFiniteNumber(usdValue)) return hasPositiveAssets(assets); - if (usdValue === 0 && hasPositiveAssets(assets)) return true; - return false; -}; - -const 
computeUsdValue = (assets: string, decimals: number, price: number): number => { - return formatBalance(assets, decimals) * price; -}; - /** * Processes raw markets data with blacklist filtering and oracle enrichment. * @@ -104,88 +79,16 @@ export const useProcessedMarkets = () => { }, [rawMarketsFromQuery, allBlacklistedMarketKeys, getOracleData]); // Build token list for USD fallbacks only when needed - const tokensForUsdFallback = useMemo(() => { - if (!processedData.allMarkets.length) return []; + const tokensForUsdFallback = useMemo(() => collectTokenPriceInputsForMarkets(processedData.allMarkets), [processedData.allMarkets]); - const tokens: TokenPriceInput[] = []; - const seen = new Set(); - - const addToken = (address: string, chainId: number) => { - const key = getTokenPriceKey(address, chainId); - if (seen.has(key)) return; - seen.add(key); - tokens.push({ address, chainId }); - }; - - processedData.allMarkets.forEach((market) => { - const chainId = market.morphoBlue.chain.id; - - const needsLoanUsd = - shouldComputeUsd(market.state?.supplyAssetsUsd, market.state?.supplyAssets) || - shouldComputeUsd(market.state?.borrowAssetsUsd, market.state?.borrowAssets) || - shouldComputeUsd(market.state?.liquidityAssetsUsd, market.state?.liquidityAssets); - - const needsCollateralUsd = shouldComputeUsd(market.state?.collateralAssetsUsd ?? 
null, market.state?.collateralAssets); - - if (needsLoanUsd) { - addToken(market.loanAsset.address, chainId); - } - - if (needsCollateralUsd) { - addToken(market.collateralAsset.address, chainId); - } - }); - - return tokens; - }, [processedData.allMarkets]); - - const { prices: tokenPrices } = useTokenPrices(tokensForUsdFallback); + const { prices: tokenPrices, sources: tokenPriceSources } = useTokenPrices(tokensForUsdFallback); const allMarketsWithUsd = useMemo(() => { if (!processedData.allMarkets.length) return processedData.allMarkets; if (tokensForUsdFallback.length === 0 || tokenPrices.size === 0) return processedData.allMarkets; - return processedData.allMarkets.map((market) => { - const chainId = market.morphoBlue.chain.id; - const loanPrice = tokenPrices.get(getTokenPriceKey(market.loanAsset.address, chainId)); - const collateralPrice = tokenPrices.get(getTokenPriceKey(market.collateralAsset.address, chainId)); - - let nextState = market.state; - let changed = false; - - if (loanPrice !== undefined && Number.isFinite(loanPrice)) { - if (shouldComputeUsd(nextState.supplyAssetsUsd, nextState.supplyAssets)) { - nextState = { ...nextState, supplyAssetsUsd: computeUsdValue(nextState.supplyAssets, market.loanAsset.decimals, loanPrice) }; - changed = true; - } - if (shouldComputeUsd(nextState.borrowAssetsUsd, nextState.borrowAssets)) { - nextState = { ...nextState, borrowAssetsUsd: computeUsdValue(nextState.borrowAssets, market.loanAsset.decimals, loanPrice) }; - changed = true; - } - if (shouldComputeUsd(nextState.liquidityAssetsUsd, nextState.liquidityAssets)) { - nextState = { - ...nextState, - liquidityAssetsUsd: computeUsdValue(nextState.liquidityAssets, market.loanAsset.decimals, loanPrice), - }; - changed = true; - } - } - - if ( - collateralPrice !== undefined && - Number.isFinite(collateralPrice) && - shouldComputeUsd(nextState.collateralAssetsUsd ?? 
null, nextState.collateralAssets) - ) { - nextState = { - ...nextState, - collateralAssetsUsd: computeUsdValue(nextState.collateralAssets, market.collateralAsset.decimals, collateralPrice), - }; - changed = true; - } - - return changed ? { ...market, state: nextState } : market; - }); - }, [processedData.allMarkets, tokenPrices, tokensForUsdFallback]); + return applyTokenPriceResolutionToMarkets(processedData.allMarkets, tokenPrices, tokenPriceSources); + }, [processedData.allMarkets, tokenPriceSources, tokenPrices, tokensForUsdFallback]); const whitelistedMarketsWithUsd = useMemo(() => { return allMarketsWithUsd.filter((market) => market.whitelisted); diff --git a/src/hooks/useTokenPrices.ts b/src/hooks/useTokenPrices.ts index 68d0a9f3..01091cf3 100644 --- a/src/hooks/useTokenPrices.ts +++ b/src/hooks/useTokenPrices.ts @@ -3,6 +3,7 @@ import { useQuery } from '@tanstack/react-query'; import { fetchTokenPrices, type TokenPriceInput } from '@/data-sources/morpho-api/prices'; import { getTokenPriceKey } from '@/data-sources/morpho-api/prices'; import { findToken, TokenPeg, supportedTokens } from '@/utils/tokens'; +import type { MarketUsdPriceSource } from '@/utils/types'; import { fetchMajorPrices, type MajorPrices } from '@/utils/majorPrices'; // Query keys for token prices @@ -20,6 +21,7 @@ export const tokenPriceKeys = { type UseTokenPricesReturn = { prices: Map; + sources: Map; isLoading: boolean; error: Error | null; }; @@ -173,8 +175,36 @@ export const useTokenPrices = (tokens: TokenPriceInput[]): UseTokenPricesReturn return resolvedPrices; }, [prices, stableTokens, tokensWithPegRefs, majorPrices]); + const priceSources = useMemo(() => { + const basePrices = prices ?? 
new Map(); + const resolvedSources = new Map(); + + stableTokens.forEach((token) => { + const key = getTokenPriceKey(token.address, token.chainId); + const directPrice = basePrices.get(key); + + if (isFinitePositive(directPrice)) { + resolvedSources.set(key, 'direct'); + return; + } + + const meta = findToken(token.address, token.chainId); + if (!meta?.peg) { + return; + } + + const fallbackPrice = pricesWithFallback.get(key); + if (isFinitePositive(fallbackPrice)) { + resolvedSources.set(key, 'peg'); + } + }); + + return resolvedSources; + }, [prices, pricesWithFallback, stableTokens]); + return { prices: pricesWithFallback, + sources: priceSources, isLoading, error: error ?? null, }; diff --git a/src/hooks/useUserPosition.ts b/src/hooks/useUserPosition.ts index 40b81eae..0addc249 100644 --- a/src/hooks/useUserPosition.ts +++ b/src/hooks/useUserPosition.ts @@ -1,9 +1,7 @@ import { useQuery } from '@tanstack/react-query'; import type { Address } from 'viem'; import { usePublicClient } from 'wagmi'; -import { supportsMorphoApi } from '@/config/dataSources'; -import { fetchMorphoUserPositionForMarket } from '@/data-sources/morpho-api/positions'; -import { fetchSubgraphUserPositionForMarket } from '@/data-sources/subgraph/positions'; +import { fetchUserPositionForMarket } from '@/data-sources/user-position'; import type { SupportedNetworks } from '@/utils/networks'; import { fetchPositionSnapshot } from '@/utils/positions'; import type { MarketPosition } from '@/utils/types'; @@ -36,7 +34,6 @@ const useUserPosition = (user: string | undefined, chainId: SupportedNetworks | queryKey: queryKey, queryFn: async (): Promise => { if (!user || !chainId || !marketKey) { - console.log('Missing user, chainId, or marketKey for useUserPosition'); return null; } @@ -46,11 +43,9 @@ const useUserPosition = (user: string | undefined, chainId: SupportedNetworks | } // 1. 
Try fetching the on-chain snapshot first - console.log(`Attempting fetchPositionSnapshot for ${user} on market ${marketKey}`); let snapshot = null; try { snapshot = await fetchPositionSnapshot(marketKey, user as Address, chainId, undefined, publicClient); - console.log(`Snapshot result for ${marketKey}:`, snapshot ? 'Exists' : 'Null'); } catch (snapshotError) { console.error(`Error fetching position snapshot for ${user} on market ${marketKey}:`, snapshotError); // Snapshot fetch failed, will proceed to fallback fetch @@ -64,7 +59,6 @@ const useUserPosition = (user: string | undefined, chainId: SupportedNetworks | if (market) { // Local market data found, construct position directly - console.log(`Found local market data for ${marketKey}, constructing position from snapshot.`); finalPosition = { market: market, state: { @@ -79,29 +73,7 @@ const useUserPosition = (user: string | undefined, chainId: SupportedNetworks | } else { // Local market data NOT found, need to fetch from fallback to get structure console.warn(`Local market data not found for ${marketKey}. 
Fetching from fallback source to combine with snapshot.`); - let fallbackPosition: MarketPosition | null = null; - - // Try Morpho API first if supported - if (supportsMorphoApi(chainId)) { - try { - console.log(`Attempting to fetch position via Morpho API for ${marketKey}`); - fallbackPosition = await fetchMorphoUserPositionForMarket(marketKey, user, chainId); - } catch (morphoError) { - console.error('Failed to fetch position via Morpho API:', morphoError); - // Continue to Subgraph fallback - } - } - - // If Morpho API failed or not supported, try Subgraph - if (!fallbackPosition) { - try { - console.log(`Attempting to fetch position via Subgraph for ${marketKey}`); - fallbackPosition = await fetchSubgraphUserPositionForMarket(marketKey, user, chainId); - } catch (subgraphError) { - console.error('Failed to fetch position via Subgraph:', subgraphError); - fallbackPosition = null; - } - } + const fallbackPosition = await fetchUserPositionForMarket(marketKey, user, chainId); if (fallbackPosition) { // Fallback succeeded, combine with snapshot state @@ -123,32 +95,8 @@ const useUserPosition = (user: string | undefined, chainId: SupportedNetworks | } } else { // Snapshot failed, rely entirely on the fallback data source - console.log(`Snapshot failed for ${marketKey}, fetching from fallback source.`); - - // Try Morpho API first if supported - if (supportsMorphoApi(chainId)) { - try { - console.log(`Attempting to fetch position via Morpho API for ${marketKey}`); - finalPosition = await fetchMorphoUserPositionForMarket(marketKey, user, chainId); - } catch (morphoError) { - console.error('Failed to fetch position via Morpho API:', morphoError); - // Continue to Subgraph fallback - } - } - - // If Morpho API failed or not supported, try Subgraph - if (!finalPosition) { - try { - console.log(`Attempting to fetch position via Subgraph for ${marketKey}`); - finalPosition = await fetchSubgraphUserPositionForMarket(marketKey, user, chainId); - } catch (subgraphError) { - 
console.error('Failed to fetch position via Subgraph:', subgraphError); - finalPosition = null; - } - } + finalPosition = await fetchUserPositionForMarket(marketKey, user, chainId); } - - console.log(`Final position data for ${user} on market ${marketKey}:`, finalPosition ? 'Found' : 'Not Found'); // If finalPosition has zero balances, it's still a valid position state from the snapshot or fallback return finalPosition; }, diff --git a/src/hooks/useUserPositions.ts b/src/hooks/useUserPositions.ts index 55367a0f..d4c660c4 100644 --- a/src/hooks/useUserPositions.ts +++ b/src/hooks/useUserPositions.ts @@ -1,9 +1,8 @@ import { useCallback } from 'react'; import { useQuery, useQueryClient } from '@tanstack/react-query'; import type { Address } from 'viem'; -import { supportsMorphoApi } from '@/config/dataSources'; -import { fetchMorphoUserPositionMarkets } from '@/data-sources/morpho-api/positions'; -import { fetchSubgraphUserPositionMarkets } from '@/data-sources/subgraph/positions'; +import { fetchUserPositionMarkets } from '@/data-sources/position-markets'; +import { getChainScopedMarketKey } from '@/utils/marketIdentity'; import { SupportedNetworks } from '@/utils/networks'; import { fetchLatestPositionSnapshotsWithOraclePrices, type PositionSnapshot, type PositionMarketOracleInput } from '@/utils/positions'; import { getClient } from '@/utils/rpc'; @@ -46,58 +45,6 @@ export const positionKeys = { ] as const, }; -// --- Helper Fetch Function --- // - -// Fetches market keys ONLY from API/Subgraph sources -const fetchSourceMarketKeys = async (user: string, chainIds?: SupportedNetworks[]): Promise => { - const allSupportedNetworks = Object.values(SupportedNetworks).filter((value) => typeof value === 'number') as SupportedNetworks[]; - - // Filter to specific chains if provided - const networksToFetch = chainIds ?? 
allSupportedNetworks; - - const results = await Promise.allSettled( - networksToFetch.map(async (network) => { - let markets: PositionMarket[] = []; - let apiError = false; - const morphoApiSupported = supportsMorphoApi(network); - - // Try Morpho API first if supported - if (morphoApiSupported) { - try { - console.log(`Attempting to fetch positions via Morpho API for network ${network}`); - markets = await fetchMorphoUserPositionMarkets(user, network); - } catch (morphoError) { - console.error(`Failed to fetch positions via Morpho API for network ${network}:`, morphoError); - apiError = true; - // Continue to Subgraph fallback - } - } - - // If Morpho API failed or not supported, try Subgraph - if (markets.length === 0 && (!morphoApiSupported || apiError)) { - try { - console.log(`Attempting to fetch positions via Subgraph for network ${network}`); - markets = await fetchSubgraphUserPositionMarkets(user, network); - } catch (subgraphError) { - console.error(`Failed to fetch positions via Subgraph for network ${network}:`, subgraphError); - return []; - } - } - - return markets; - }), - ); - - let sourcePositionMarkets: PositionMarket[] = []; - results.forEach((result) => { - if (result.status === 'fulfilled') { - sourcePositionMarkets = sourcePositionMarkets.concat(result.value); - } - }); - - return sourcePositionMarkets; -}; - // --- Main Hook --- // const useUserPositions = (user: string | undefined, showEmpty = false, chainIds?: SupportedNetworks[]) => { @@ -120,8 +67,11 @@ const useUserPositions = (user: string | undefined, showEmpty = false, chainIds? 
// User is guaranteed non-null here due to the 'enabled' flag if (!user) throw new Error('Assertion failed: User should be defined here.'); - // Fetch keys from API/Subgraph - const sourceMarketKeys = await fetchSourceMarketKeys(user, chainIds); + const allSupportedNetworks = Object.values(SupportedNetworks).filter((value) => typeof value === 'number') as SupportedNetworks[]; + const networksToFetch = chainIds ?? allSupportedNetworks; + + // Fetch keys from the highest-priority source that supports this request shape. + const sourceMarketKeys = await fetchUserPositionMarkets(user, networksToFetch); // Get keys from cache and filter by chainIds if provided const cachedMarkets = getUserMarkets(); const filteredCachedMarkets = chainIds @@ -132,14 +82,13 @@ const useUserPositions = (user: string | undefined, showEmpty = false, chainIds? const combinedMarkets = [...sourceMarketKeys, ...filteredCachedMarkets]; const uniqueMarketsMap = new Map(); combinedMarkets.forEach((market) => { - const key = `${market.marketUniqueKey.toLowerCase()}-${market.chainId}`; + const key = getChainScopedMarketKey(market.marketUniqueKey, market.chainId); if (!uniqueMarketsMap.has(key)) { uniqueMarketsMap.set(key, market); } }); const finalMarketKeys = Array.from(uniqueMarketsMap.values()); - // console.log(`[Positions] Query 1: Final unique keys count: ${finalMarketKeys.length}`); return { finalMarketKeys }; }, enabled: !!user && allMarkets.length > 0, @@ -156,8 +105,6 @@ const useUserPositions = (user: string | undefined, showEmpty = false, chainIds? queryFn: async () => { if (!initialData || !user) throw new Error('Assertion failed: initialData/user should be defined here.'); - console.log('fetching enhanced positions with market keys'); - const { finalMarketKeys } = initialData; // Group markets by chainId for batched fetching @@ -171,7 +118,7 @@ const useUserPositions = (user: string | undefined, showEmpty = false, chainIds? 
// Build market data map from allMarkets context (no need to fetch individually) const marketDataMap = new Map(); allMarkets.forEach((market) => { - marketDataMap.set(market.uniqueKey.toLowerCase(), market); + marketDataMap.set(getChainScopedMarketKey(market.uniqueKey, market.morphoBlue.chain.id), market); }); // Fetch snapshots for each chain using batched multicall @@ -187,7 +134,7 @@ const useUserPositions = (user: string | undefined, showEmpty = false, chainIds? const marketInputs: PositionMarketOracleInput[] = markets.map((marketInfo) => ({ marketUniqueKey: marketInfo.marketUniqueKey, - oracleAddress: marketDataMap.get(marketInfo.marketUniqueKey.toLowerCase())?.oracleAddress ?? null, + oracleAddress: marketDataMap.get(getChainScopedMarketKey(marketInfo.marketUniqueKey, marketInfo.chainId))?.oracleAddress ?? null, })); const { snapshots, oraclePrices } = await fetchLatestPositionSnapshotsWithOraclePrices( marketInputs, @@ -198,10 +145,10 @@ const useUserPositions = (user: string | undefined, showEmpty = false, chainIds? // Merge into allSnapshots snapshots.forEach((snapshot, marketId) => { - allSnapshots.set(marketId.toLowerCase(), snapshot); + allSnapshots.set(getChainScopedMarketKey(marketId, chainId), snapshot); }); oraclePrices.forEach((oraclePrice, marketId) => { - allOraclePrices.set(marketId.toLowerCase(), oraclePrice); + allOraclePrices.set(getChainScopedMarketKey(marketId, chainId), oraclePrice); }); }), ); @@ -209,7 +156,7 @@ const useUserPositions = (user: string | undefined, showEmpty = false, chainIds? 
// Combine market data with snapshots const validPositions: EnhancedMarketPosition[] = []; finalMarketKeys.forEach((marketInfo) => { - const marketKey = marketInfo.marketUniqueKey.toLowerCase(); + const marketKey = getChainScopedMarketKey(marketInfo.marketUniqueKey, marketInfo.chainId); const market = marketDataMap.get(marketKey); const snapshot = allSnapshots.get(marketKey); diff --git a/src/utils/marketIdentity.ts b/src/utils/marketIdentity.ts new file mode 100644 index 00000000..f7f72a3a --- /dev/null +++ b/src/utils/marketIdentity.ts @@ -0,0 +1,3 @@ +export const getChainScopedMarketKey = (marketUniqueKey: string, chainId: number): string => { + return `${marketUniqueKey.toLowerCase()}-${chainId}`; +}; diff --git a/src/utils/rpc.ts b/src/utils/rpc.ts index 0376f4de..5b973580 100644 --- a/src/utils/rpc.ts +++ b/src/utils/rpc.ts @@ -4,6 +4,18 @@ import { getDefaultRPC, getViemChain, SupportedNetworks, hyperEvm } from './netw // Default clients (cached) let defaultClients: Partial> = {}; +const customClients = new Map(); + +const RPC_BATCH_WAIT_MS = 16; +const RPC_BATCH_SIZE = 1000; + +const createHttpTransport = (url: string) => + http(url, { + batch: { + batchSize: RPC_BATCH_SIZE, + wait: RPC_BATCH_WAIT_MS, + }, + }); // Initialize default clients const initializeDefaultClients = () => { @@ -11,31 +23,31 @@ const initializeDefaultClients = () => { defaultClients = { [SupportedNetworks.Mainnet]: createPublicClient({ chain: mainnet, - transport: http(getDefaultRPC(SupportedNetworks.Mainnet)), + transport: createHttpTransport(getDefaultRPC(SupportedNetworks.Mainnet)), }), [SupportedNetworks.Base]: createPublicClient({ chain: base, - transport: http(getDefaultRPC(SupportedNetworks.Base)), + transport: createHttpTransport(getDefaultRPC(SupportedNetworks.Base)), }) as PublicClient, [SupportedNetworks.Polygon]: createPublicClient({ chain: polygon, - transport: http(getDefaultRPC(SupportedNetworks.Polygon)), + transport: 
createHttpTransport(getDefaultRPC(SupportedNetworks.Polygon)), }), [SupportedNetworks.Unichain]: createPublicClient({ chain: unichain, - transport: http(getDefaultRPC(SupportedNetworks.Unichain)), + transport: createHttpTransport(getDefaultRPC(SupportedNetworks.Unichain)), }) as PublicClient, [SupportedNetworks.Arbitrum]: createPublicClient({ chain: arbitrum, - transport: http(getDefaultRPC(SupportedNetworks.Arbitrum)), + transport: createHttpTransport(getDefaultRPC(SupportedNetworks.Arbitrum)), }) as PublicClient, [SupportedNetworks.HyperEVM]: createPublicClient({ chain: hyperEvm, - transport: http(getDefaultRPC(SupportedNetworks.HyperEVM)), + transport: createHttpTransport(getDefaultRPC(SupportedNetworks.HyperEVM)), }) as PublicClient, [SupportedNetworks.Monad]: createPublicClient({ chain: monad, - transport: http(getDefaultRPC(SupportedNetworks.Monad)), + transport: createHttpTransport(getDefaultRPC(SupportedNetworks.Monad)), }) as PublicClient, }; } @@ -50,14 +62,23 @@ function createClientWithCustomRpc(chainId: SupportedNetworks, rpcUrl: string): return createPublicClient({ chain, - transport: http(rpcUrl), + transport: createHttpTransport(rpcUrl), }) as PublicClient; } // Get client with optional custom RPC URL export const getClient = (chainId: SupportedNetworks, customRpcUrl?: string): PublicClient => { if (customRpcUrl) { - return createClientWithCustomRpc(chainId, customRpcUrl); + const cacheKey = `${chainId}:${customRpcUrl}`; + const cachedClient = customClients.get(cacheKey); + + if (cachedClient) { + return cachedClient; + } + + const client = createClientWithCustomRpc(chainId, customRpcUrl); + customClients.set(cacheKey, client); + return client; } initializeDefaultClients(); diff --git a/src/utils/tokenCatalog.ts b/src/utils/tokenCatalog.ts new file mode 100644 index 00000000..329e526d --- /dev/null +++ b/src/utils/tokenCatalog.ts @@ -0,0 +1,129 @@ +import { z } from 'zod'; +import { SupportedNetworks, getViemChain } from '@/utils/networks'; +import 
{ supportedTokens, type ERC20Token } from '@/utils/tokens'; + +const PendleAssetSchema = z.object({ + address: z.string(), + chainId: z.number(), + symbol: z.string(), + decimals: z.number(), + proIcon: z.string().nullable(), +}); + +type PendleAsset = z.infer; + +const TOKEN_CATALOG_TTL_MS = 5 * 60 * 1000; +const PENDLE_SUPPORTED_CHAIN_IDS = [ + SupportedNetworks.Mainnet, + SupportedNetworks.Base, + SupportedNetworks.Arbitrum, + SupportedNetworks.HyperEVM, +] as const; + +const localTokensWithSource: ERC20Token[] = supportedTokens.map((token) => ({ + ...token, + source: 'local', +})); + +let tokenCatalogCache: + | { + expiresAt: number; + promise: Promise; + } + | null = null; + +const fetchPendleAssets = async (chainId: number): Promise => { + try { + const response = await fetch(`https://api-v2.pendle.finance/core/v1/${chainId}/assets/all`); + + if (!response.ok) { + return []; + } + + const data = (await response.json()) as PendleAsset[]; + return z.array(PendleAssetSchema).parse(data); + } catch (error) { + console.error(`Error fetching Pendle assets for chain ${chainId}:`, error); + return []; + } +}; + +const convertPendleAssetToToken = (asset: PendleAsset, chainId: SupportedNetworks): ERC20Token => { + return { + decimals: asset.decimals, + img: asset.proIcon ?? 
undefined, + isFactoryToken: true, + networks: [ + { + address: asset.address, + chain: getViemChain(chainId), + }, + ], + protocol: { + name: 'Pendle', + }, + source: 'external', + symbol: asset.symbol, + }; +}; + +const mergeCatalogTokens = (externalTokens: ERC20Token[]): ERC20Token[] => { + const filteredExternalTokens = externalTokens.filter((externalToken) => { + return !externalToken.networks.some((externalNetwork) => + supportedTokens.some((supportedToken) => + supportedToken.networks.some( + (supportedNetwork) => + supportedNetwork.address.toLowerCase() === externalNetwork.address.toLowerCase() && + supportedNetwork.chain.id === externalNetwork.chain.id, + ), + ), + ); + }); + + return [...localTokensWithSource, ...filteredExternalTokens]; +}; + +export const getLocalTokenCatalog = (): ERC20Token[] => { + return localTokensWithSource; +}; + +export const fetchMergedTokenCatalog = async (): Promise => { + const now = Date.now(); + + if (tokenCatalogCache && tokenCatalogCache.expiresAt > now) { + return tokenCatalogCache.promise; + } + + const promise = (async () => { + try { + const externalTokenGroups = await Promise.all( + PENDLE_SUPPORTED_CHAIN_IDS.map(async (chainId) => { + const assets = await fetchPendleAssets(chainId); + return assets.map((asset) => convertPendleAssetToToken(asset, chainId)); + }), + ); + + return mergeCatalogTokens(externalTokenGroups.flat()); + } catch (error) { + tokenCatalogCache = null; + throw error; + } + })(); + + tokenCatalogCache = { + expiresAt: now + TOKEN_CATALOG_TTL_MS, + promise, + }; + + return promise; +}; + +export const findTokenInCatalog = (tokens: ERC20Token[], address: string, chainId: number): ERC20Token | undefined => { + if (!address || !chainId) { + return undefined; + } + + return tokens.find((token) => + token.networks.some((network) => network.address.toLowerCase() === address.toLowerCase() && network.chain.id === chainId), + ); +}; diff --git a/src/utils/tokens.ts b/src/utils/tokens.ts index 
d120f85f..11a5a44e 100644 --- a/src/utils/tokens.ts +++ b/src/utils/tokens.ts @@ -99,7 +99,7 @@ const supportedTokens = [ { symbol: 'USDA', img: require('../imgs/tokens/usda.png') as string, - decimals: 6, + decimals: 18, networks: [{ chain: mainnet, address: '0x0000206329b97DB379d5E1Bf586BbDB969C63274' }], peg: TokenPeg.USD, }, @@ -238,7 +238,7 @@ const supportedTokens = [ { symbol: 'EURCV', img: require('../imgs/tokens/eurcv.svg') as string, - decimals: 6, + decimals: 18, networks: [{ chain: mainnet, address: '0x5F7827FDeb7c20b443265Fc2F40845B715385Ff2' }], }, { @@ -387,7 +387,7 @@ const supportedTokens = [ { symbol: 'tBTC', img: require('../imgs/tokens/tbtc.webp') as string, - decimals: 8, + decimals: 18, networks: [{ chain: mainnet, address: '0x18084fbA666a33d37592fA2633fD49a74DD93a88' }], peg: TokenPeg.BTC, }, diff --git a/src/utils/types.ts b/src/utils/types.ts index 0f733b9d..64c90dc7 100644 --- a/src/utils/types.ts +++ b/src/utils/types.ts @@ -114,6 +114,8 @@ export type TokenInfo = { decimals: number; }; +export type MarketUsdPriceSource = 'direct' | 'peg' | 'none'; + type AssetType = { id: string; address: string; @@ -344,6 +346,7 @@ export type Market = { address: string; }[]; hasUSDPrice: boolean; + usdPriceSource: MarketUsdPriceSource; warnings: MarketWarning[]; oracle?: { data: MorphoChainlinkOracleData; From 7b00afee8d1d2696e51fc7245de0765df454f220 Mon Sep 17 00:00:00 2001 From: antoncoding Date: Sat, 14 Mar 2026 14:35:20 +0800 Subject: [PATCH 2/5] chore: review feedbacks --- AGENTS.md | 3 + src/data-sources/envio/market.ts | 12 +- src/data-sources/envio/positions.ts | 6 +- src/data-sources/market-activity.ts | 61 +++++-- src/data-sources/market-catalog.ts | 49 +++++- src/data-sources/market-details.ts | 5 + src/data-sources/market-historical.ts | 29 +++- src/data-sources/market-participants.ts | 41 ++++- src/data-sources/position-markets.ts | 16 +- .../shared/market-target-rate-enrichment.ts | 154 ++++++++++++++++++ 
src/data-sources/shared/token-metadata.ts | 108 +++++++----- src/data-sources/subgraph/historical.ts | 26 ++- src/data-sources/subgraph/market.ts | 3 +- src/data-sources/user-position.ts | 70 +++++++- src/hooks/queries/useMarketMetricsQuery.ts | 2 - src/hooks/useFeedLastUpdatedByChain.ts | 18 +- src/hooks/useFreshMarketsState.ts | 72 ++------ src/hooks/useMarketData.ts | 10 +- src/hooks/useReadOnlyClient.ts | 23 +++ src/hooks/useUserPosition.ts | 25 ++- src/hooks/useUserPositions.ts | 6 +- src/utils/positions.ts | 77 ++++++--- 22 files changed, 619 insertions(+), 197 deletions(-) create mode 100644 src/data-sources/shared/market-target-rate-enrichment.ts create mode 100644 src/hooks/useReadOnlyClient.ts diff --git a/AGENTS.md b/AGENTS.md index ef30427a..a5034d4a 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -171,6 +171,9 @@ When touching transaction and position flows, validation MUST include all releva 35. **USD price provenance integrity**: market-level USD display state must distinguish direct fetched token/API prices, peg-based hardcoded fallback estimates, and missing prices. Tooltip and trust gates must key off that explicit provenance, not a generic “has USD” boolean that conflates fallback and absence. 36. **RPC dedupe and batching integrity**: shared RPC chokepoints must cache per-endpoint viem clients, enable transport-level JSON-RPC batching for parallel reads, and dedupe in-flight chain-wide historical/token-metadata jobs so multiple consumers cannot multiply identical RPC bursts. 37. **Historical chart unit integrity**: shared historical adapters must keep each series on one explicit unit contract across all sources. Asset-volume series must not mix raw smallest-unit values and display-unit decimals between sources, and chart consumers must tolerate stale cached points during contract transitions. +38. 
**RPC configuration reactivity integrity**: any query or cache that depends on the active custom RPC selection must key or invalidate at the exact RPC-dependent layer (for example snapshot/enrichment queries), while RPC-independent discovery queries must not churn on RPC changes. Custom-RPC switching must not leave position or market state pinned to data fetched through the previous endpoint. +39. **Sparse market contract parity**: any fallback or single-entity market/position path that starts from sparse source data must hydrate missing market fields through the shared market-detail/catalog enrichment chokepoints before the result reaches shared UI consumers. Do not let raw fallback markets bypass shared USD, target-rate, blacklist, or chain-scoped identity normalization. +40. **Indexer market pre-hydration integrity**: shared market-catalog/indexer adapters must exclude structurally invalid markets (for example zero-address IRM or collateral token, plus local blacklist gates) before token-metadata hydration or enrichment begins, and source logs must distinguish raw fetch completion from downstream enrichment so slow stages are attributable. 
### REQUIRED: Regression Rule Capture diff --git a/src/data-sources/envio/market.ts b/src/data-sources/envio/market.ts index bb947b16..daf0a5a2 100644 --- a/src/data-sources/envio/market.ts +++ b/src/data-sources/envio/market.ts @@ -83,6 +83,7 @@ const withVisibleMarketsFilter = (where: Record): Record): Record { + return normalizeAddress(market.collateralToken) === zeroAddress || normalizeAddress(market.irm) === zeroAddress; +}; + const fetchEnvioMarketsPage = async ({ limit, offset, @@ -244,7 +254,7 @@ const buildEnvioMarketsMap = async ( customRpcUrls?: CustomRpcUrls; } = {}, ): Promise> => { - const visibleRows = rows.filter((market) => !isTokenBlacklistedMarket(market)); + const visibleRows = rows.filter((market) => !hasExcludedEnvioAddresses(market) && !isTokenBlacklistedMarket(market)); const tokenMetadataMap = await fetchTokenMetadataMap(toMarketTokenRefs(visibleRows), options.customRpcUrls); const marketsByKey = new Map(); diff --git a/src/data-sources/envio/positions.ts b/src/data-sources/envio/positions.ts index 77f96e1b..9ede5b5c 100644 --- a/src/data-sources/envio/positions.ts +++ b/src/data-sources/envio/positions.ts @@ -1,4 +1,5 @@ import type { MarketPosition } from '@/utils/types'; +import type { CustomRpcUrls } from '@/stores/useCustomRpc'; import { ALL_SUPPORTED_NETWORKS, type SupportedNetworks } from '@/utils/networks'; import { envioPositionForMarketQuery, envioPositionsQuery } from '@/graphql/envio-queries'; import { fetchEnvioMarket } from './market'; @@ -111,6 +112,9 @@ export const fetchEnvioUserPositionForMarket = async ( marketUniqueKey: string, userAddress: string, chainId: SupportedNetworks, + options: { + customRpcUrls?: CustomRpcUrls; + } = {}, ): Promise => { const response = await envioGraphqlFetcher( envioPositionForMarketQuery, @@ -130,7 +134,7 @@ export const fetchEnvioUserPositionForMarket = async ( return null; } - const market = await fetchEnvioMarket(marketUniqueKey, chainId); + const market = await 
fetchEnvioMarket(marketUniqueKey, chainId, options); if (!market) { return null; diff --git a/src/data-sources/market-activity.ts b/src/data-sources/market-activity.ts index 28ce96cc..6a69a9f1 100644 --- a/src/data-sources/market-activity.ts +++ b/src/data-sources/market-activity.ts @@ -7,6 +7,7 @@ import { import { fetchMorphoMarketBorrows } from '@/data-sources/morpho-api/market-borrows'; import { fetchMorphoMarketLiquidations } from '@/data-sources/morpho-api/market-liquidations'; import { fetchMorphoMarketSupplies } from '@/data-sources/morpho-api/market-supplies'; +import { getErrorMessage, logDataSourceEvent } from '@/data-sources/shared/source-debug'; import { fetchSubgraphMarketBorrows } from '@/data-sources/subgraph/market-borrows'; import { fetchSubgraphMarketLiquidations } from '@/data-sources/subgraph/market-liquidations'; import { fetchSubgraphMarketSupplies } from '@/data-sources/subgraph/market-supplies'; @@ -24,19 +25,31 @@ export const fetchMarketSupplies = async ( if (hasEnvioIndexer()) { try { return await fetchEnvioMarketSupplies(marketId, network, minAssets, pageSize, skip); - } catch (envioError) { - console.error('Failed to fetch supplies via Envio:', envioError); + } catch (error) { + logDataSourceEvent('market-supplies', 'Envio supplies fetch failed, falling back', { + chainId: network, + marketUniqueKey: marketId, + reason: getErrorMessage(error), + }); } } if (supportsMorphoApi(network)) { try { return await fetchMorphoMarketSupplies(marketId, minAssets, pageSize, skip); - } catch (morphoError) { - console.error('Failed to fetch supplies via Morpho API:', morphoError); + } catch (error) { + logDataSourceEvent('market-supplies', 'Morpho API supplies fetch failed, falling back to subgraph', { + chainId: network, + marketUniqueKey: marketId, + reason: getErrorMessage(error), + }); } } + logDataSourceEvent('market-supplies', 'using subgraph fallback for supplies', { + chainId: network, + marketUniqueKey: marketId, + }); return 
fetchSubgraphMarketSupplies(marketId, loanAssetId, network, minAssets, pageSize, skip); }; @@ -51,19 +64,31 @@ export const fetchMarketBorrows = async ( if (hasEnvioIndexer()) { try { return await fetchEnvioMarketBorrows(marketId, network, minAssets, pageSize, skip); - } catch (envioError) { - console.error('Failed to fetch borrows via Envio:', envioError); + } catch (error) { + logDataSourceEvent('market-borrows', 'Envio borrows fetch failed, falling back', { + chainId: network, + marketUniqueKey: marketId, + reason: getErrorMessage(error), + }); } } if (supportsMorphoApi(network)) { try { return await fetchMorphoMarketBorrows(marketId, minAssets, pageSize, skip); - } catch (morphoError) { - console.error('Failed to fetch borrows via Morpho API:', morphoError); + } catch (error) { + logDataSourceEvent('market-borrows', 'Morpho API borrows fetch failed, falling back to subgraph', { + chainId: network, + marketUniqueKey: marketId, + reason: getErrorMessage(error), + }); } } + logDataSourceEvent('market-borrows', 'using subgraph fallback for borrows', { + chainId: network, + marketUniqueKey: marketId, + }); return fetchSubgraphMarketBorrows(marketId, loanAssetId, network, minAssets, pageSize, skip); }; @@ -74,18 +99,30 @@ export const fetchMarketLiquidations = async ( if (hasEnvioIndexer()) { try { return await fetchEnvioMarketLiquidations(marketId, network); - } catch (envioError) { - console.error('Failed to fetch liquidations via Envio:', envioError); + } catch (error) { + logDataSourceEvent('market-liquidations', 'Envio liquidations fetch failed, falling back', { + chainId: network, + marketUniqueKey: marketId, + reason: getErrorMessage(error), + }); } } if (supportsMorphoApi(network)) { try { return await fetchMorphoMarketLiquidations(marketId); - } catch (morphoError) { - console.error('Failed to fetch liquidations via Morpho API:', morphoError); + } catch (error) { + logDataSourceEvent('market-liquidations', 'Morpho API liquidations fetch failed, falling back 
to subgraph', { + chainId: network, + marketUniqueKey: marketId, + reason: getErrorMessage(error), + }); } } + logDataSourceEvent('market-liquidations', 'using subgraph fallback for liquidations', { + chainId: network, + marketUniqueKey: marketId, + }); return fetchSubgraphMarketLiquidations(marketId, network); }; diff --git a/src/data-sources/market-catalog.ts b/src/data-sources/market-catalog.ts index 7a992099..2597537d 100644 --- a/src/data-sources/market-catalog.ts +++ b/src/data-sources/market-catalog.ts @@ -4,6 +4,7 @@ import { fetchMorphoMarkets, fetchMorphoMarketsMultiChain } from '@/data-sources import { mergeMarketsByIdentity } from '@/data-sources/shared/market-merge'; import { filterTokenBlacklistedMarkets } from '@/data-sources/shared/market-visibility'; import { enrichMarketsWithHistoricalApysWithinTimeout } from '@/data-sources/shared/market-rate-enrichment'; +import { enrichMarketsWithTargetRate } from '@/data-sources/shared/market-target-rate-enrichment'; import { getErrorMessage, logDataSourceEvent } from '@/data-sources/shared/source-debug'; import { fetchSubgraphMarkets } from '@/data-sources/subgraph/market'; import type { CustomRpcUrls } from '@/stores/useCustomRpc'; @@ -12,6 +13,31 @@ import type { Market } from '@/utils/types'; const MARKET_ENRICHMENT_TIMEOUT_MS = 8_000; +const enrichCatalogMarkets = async (markets: Market[], customRpcUrls?: CustomRpcUrls): Promise => { + const marketsWithTargetRate = await enrichMarketsWithTargetRate(markets, { + customRpcUrls, + }); + + return enrichMarketsWithHistoricalApysWithinTimeout(marketsWithTargetRate, MARKET_ENRICHMENT_TIMEOUT_MS, customRpcUrls); +}; + +const enrichCatalogMarketsWithLogging = async ( + markets: Market[], + customRpcUrls: CustomRpcUrls | undefined, + details: Record, +): Promise => { + const enrichmentStartedAt = Date.now(); + const enrichedMarkets = await enrichCatalogMarkets(markets, customRpcUrls); + + logDataSourceEvent('market-catalog', 'market enrichment completed', { + 
...details, + count: enrichedMarkets.length, + durationMs: Date.now() - enrichmentStartedAt, + }); + + return enrichedMarkets; +}; + const getMissingChainIds = (chainIds: SupportedNetworks[], markets: Market[]): SupportedNetworks[] => { const coveredChainIds = new Set(markets.map((market) => market.morphoBlue.chain.id)); return chainIds.filter((chainId) => !coveredChainIds.has(chainId)); @@ -45,25 +71,32 @@ export const fetchMarketCatalog = async ( if (hasEnvioIndexer()) { try { + const envioFetchStartedAt = Date.now(); const envioMarkets = await fetchEnvioMarkets(chainIds, { customRpcUrls, }); + const envioFetchDurationMs = Date.now() - envioFetchStartedAt; const missingChainIds = getMissingChainIds(chainIds, envioMarkets); if (missingChainIds.length === 0 && envioMarkets.length > 0) { - logDataSourceEvent('market-catalog', 'using Envio as primary source', { + logDataSourceEvent('market-catalog', 'Envio fetch completed; using Envio as primary source', { chainIds: chainIds.join(','), count: envioMarkets.length, + durationMs: envioFetchDurationMs, }); - return enrichMarketsWithHistoricalApysWithinTimeout(envioMarkets, MARKET_ENRICHMENT_TIMEOUT_MS, customRpcUrls); + return enrichCatalogMarketsWithLogging(envioMarkets, customRpcUrls, { + chainIds: chainIds.join(','), + source: 'envio-primary', + }); } - logDataSourceEvent('market-catalog', 'Envio returned incomplete coverage, falling back for missing chains only', { + logDataSourceEvent('market-catalog', 'Envio fetch completed with incomplete coverage; falling back for missing chains only', { requestedChainIds: chainIds.join(','), coveredChainIds: [...new Set(envioMarkets.map((market) => market.morphoBlue.chain.id))].join(','), missingChainIds: missingChainIds.join(','), envioCount: envioMarkets.length, + durationMs: envioFetchDurationMs, }); const fallbackMarkets = missingChainIds.length > 0 ? 
await fetchMarketsPerNetworkFallback(missingChainIds) : []; @@ -75,7 +108,10 @@ export const fetchMarketCatalog = async ( totalCount: mergedMarkets.length, }); - return enrichMarketsWithHistoricalApysWithinTimeout(mergedMarkets, MARKET_ENRICHMENT_TIMEOUT_MS, customRpcUrls); + return enrichCatalogMarketsWithLogging(mergedMarkets, customRpcUrls, { + chainIds: chainIds.join(','), + source: 'envio-merged-fallback', + }); } } catch (error) { logDataSourceEvent('market-catalog', 'Envio market catalog failed, using legacy fallback', { @@ -120,7 +156,10 @@ export const fetchMarketCatalog = async ( const mergedMarkets = mergeMarketsByIdentity(markets); if (mergedMarkets.length > 0) { - return enrichMarketsWithHistoricalApysWithinTimeout(mergedMarkets, MARKET_ENRICHMENT_TIMEOUT_MS, customRpcUrls); + return enrichCatalogMarketsWithLogging(mergedMarkets, customRpcUrls, { + chainIds: chainIds.join(','), + source: 'legacy-fallback', + }); } return fetchMarketsPerNetworkFallback(chainIds); diff --git a/src/data-sources/market-details.ts b/src/data-sources/market-details.ts index fbb17022..d82a725d 100644 --- a/src/data-sources/market-details.ts +++ b/src/data-sources/market-details.ts @@ -4,6 +4,7 @@ import { fetchMorphoMarket } from '@/data-sources/morpho-api/market'; import { isTokenBlacklistedMarket } from '@/data-sources/shared/market-visibility'; import { fetchSubgraphMarket } from '@/data-sources/subgraph/market'; import { enrichMarketsWithHistoricalApysWithinTimeout } from '@/data-sources/shared/market-rate-enrichment'; +import { enrichMarketsWithTargetRate } from '@/data-sources/shared/market-target-rate-enrichment'; import { fillMissingMarketUsdValues } from '@/data-sources/shared/market-usd'; import { getErrorMessage, logDataSourceEvent } from '@/data-sources/shared/source-debug'; import type { CustomRpcUrls } from '@/stores/useCustomRpc'; @@ -96,6 +97,10 @@ export const fetchMarketDetails = async ( const [marketWithUsd] = await fillMissingMarketUsdValues([baseMarket]); 
baseMarket = marketWithUsd ?? baseMarket; + const [marketWithTargetRate] = await enrichMarketsWithTargetRate([baseMarket], { + customRpcUrls, + }); + baseMarket = marketWithTargetRate ?? baseMarket; if (!enrichHistoricalApys) { return baseMarket; diff --git a/src/data-sources/market-historical.ts b/src/data-sources/market-historical.ts index 4e7cdd26..46a36d26 100644 --- a/src/data-sources/market-historical.ts +++ b/src/data-sources/market-historical.ts @@ -1,6 +1,7 @@ import { hasEnvioIndexer, supportsMorphoApi } from '@/config/dataSources'; import { fetchEnvioMarketHistoricalData } from '@/data-sources/envio/historical'; import { fetchMorphoMarketHistoricalData, type HistoricalDataSuccessResult } from '@/data-sources/morpho-api/historical'; +import { getErrorMessage, logDataSourceEvent } from '@/data-sources/shared/source-debug'; import { fetchSubgraphMarketHistoricalData } from '@/data-sources/subgraph/historical'; import type { CustomRpcUrls } from '@/stores/useCustomRpc'; import type { SupportedNetworks } from '@/utils/networks'; @@ -23,10 +24,18 @@ export const fetchMarketHistoricalData = async ( }); if (envioData) { + logDataSourceEvent('market-historical', 'using Envio historical source', { + chainId: network, + marketUniqueKey: uniqueKey, + }); return envioData; } - } catch (envioError) { - console.error('Failed to fetch historical data via Envio:', envioError); + } catch (error) { + logDataSourceEvent('market-historical', 'Envio historical fetch failed, falling back', { + chainId: network, + marketUniqueKey: uniqueKey, + reason: getErrorMessage(error), + }); } } @@ -35,12 +44,24 @@ export const fetchMarketHistoricalData = async ( const morphoData = await fetchMorphoMarketHistoricalData(uniqueKey, network, options); if (morphoData) { + logDataSourceEvent('market-historical', 'using Morpho API fallback for historical data', { + chainId: network, + marketUniqueKey: uniqueKey, + }); return morphoData; } - } catch (morphoError) { - console.error('Failed to 
fetch historical data via Morpho API:', morphoError); + } catch (error) { + logDataSourceEvent('market-historical', 'Morpho historical fetch failed, falling back to subgraph', { + chainId: network, + marketUniqueKey: uniqueKey, + reason: getErrorMessage(error), + }); } } + logDataSourceEvent('market-historical', 'using subgraph fallback for historical data', { + chainId: network, + marketUniqueKey: uniqueKey, + }); return fetchSubgraphMarketHistoricalData(uniqueKey, network, options); }; diff --git a/src/data-sources/market-participants.ts b/src/data-sources/market-participants.ts index 3bdda170..9db3da8d 100644 --- a/src/data-sources/market-participants.ts +++ b/src/data-sources/market-participants.ts @@ -2,6 +2,7 @@ import { hasEnvioIndexer, supportsMorphoApi } from '@/config/dataSources'; import { fetchEnvioMarketBorrowers, fetchEnvioMarketSuppliers } from '@/data-sources/envio/market-participants'; import { fetchMorphoMarketBorrowers } from '@/data-sources/morpho-api/market-borrowers'; import { fetchMorphoMarketSuppliers } from '@/data-sources/morpho-api/market-suppliers'; +import { getErrorMessage, logDataSourceEvent } from '@/data-sources/shared/source-debug'; import { fetchSubgraphMarketBorrowers } from '@/data-sources/subgraph/market-borrowers'; import { fetchSubgraphMarketSuppliers } from '@/data-sources/subgraph/market-suppliers'; import type { SupportedNetworks } from '@/utils/networks'; @@ -17,19 +18,31 @@ export const fetchMarketBorrowers = async ( if (hasEnvioIndexer()) { try { return await fetchEnvioMarketBorrowers(marketId, network, minShares, pageSize, skip); - } catch (envioError) { - console.error('Failed to fetch borrowers via Envio:', envioError); + } catch (error) { + logDataSourceEvent('market-borrowers', 'Envio borrowers fetch failed, falling back', { + chainId: network, + marketUniqueKey: marketId, + reason: getErrorMessage(error), + }); } } if (supportsMorphoApi(network)) { try { return await fetchMorphoMarketBorrowers(marketId, 
Number(network), minShares, pageSize, skip); - } catch (morphoError) { - console.error('Failed to fetch borrowers via Morpho API:', morphoError); + } catch (error) { + logDataSourceEvent('market-borrowers', 'Morpho API borrowers fetch failed, falling back to subgraph', { + chainId: network, + marketUniqueKey: marketId, + reason: getErrorMessage(error), + }); } } + logDataSourceEvent('market-borrowers', 'using subgraph fallback for borrowers', { + chainId: network, + marketUniqueKey: marketId, + }); return fetchSubgraphMarketBorrowers(marketId, network, minShares, pageSize, skip); }; @@ -43,18 +56,30 @@ export const fetchMarketSuppliers = async ( if (hasEnvioIndexer()) { try { return await fetchEnvioMarketSuppliers(marketId, network, minShares, pageSize, skip); - } catch (envioError) { - console.error('Failed to fetch suppliers via Envio:', envioError); + } catch (error) { + logDataSourceEvent('market-suppliers', 'Envio suppliers fetch failed, falling back', { + chainId: network, + marketUniqueKey: marketId, + reason: getErrorMessage(error), + }); } } if (supportsMorphoApi(network)) { try { return await fetchMorphoMarketSuppliers(marketId, Number(network), minShares, pageSize, skip); - } catch (morphoError) { - console.error('Failed to fetch suppliers via Morpho API:', morphoError); + } catch (error) { + logDataSourceEvent('market-suppliers', 'Morpho API suppliers fetch failed, falling back to subgraph', { + chainId: network, + marketUniqueKey: marketId, + reason: getErrorMessage(error), + }); } } + logDataSourceEvent('market-suppliers', 'using subgraph fallback for suppliers', { + chainId: network, + marketUniqueKey: marketId, + }); return fetchSubgraphMarketSuppliers(marketId, network, minShares, pageSize, skip); }; diff --git a/src/data-sources/position-markets.ts b/src/data-sources/position-markets.ts index 227a9a5a..96cb2e37 100644 --- a/src/data-sources/position-markets.ts +++ b/src/data-sources/position-markets.ts @@ -1,6 +1,7 @@ import { hasEnvioIndexer, 
supportsMorphoApi } from '@/config/dataSources'; import { fetchEnvioUserPositionMarkets } from '@/data-sources/envio/positions'; import { fetchMorphoUserPositionMarkets } from '@/data-sources/morpho-api/positions'; +import { getErrorMessage, logDataSourceEvent } from '@/data-sources/shared/source-debug'; import { fetchSubgraphUserPositionMarkets } from '@/data-sources/subgraph/positions'; import { getChainScopedMarketKey } from '@/utils/marketIdentity'; import { ALL_SUPPORTED_NETWORKS, type SupportedNetworks } from '@/utils/networks'; @@ -50,11 +51,22 @@ export const fetchUserPositionMarkets = async ( ): Promise => { if (hasEnvioIndexer()) { try { - return dedupePositionMarkets(await fetchEnvioUserPositionMarkets(user, chainIds)); + const positionMarkets = dedupePositionMarkets(await fetchEnvioUserPositionMarkets(user, chainIds)); + logDataSourceEvent('position-markets', 'using Envio cross-chain position discovery', { + chainIds: chainIds.join(','), + count: positionMarkets.length, + }); + return positionMarkets; } catch (error) { - console.error('[positions] Envio cross-chain fetch failed, falling back to per-network sources:', error); + logDataSourceEvent('position-markets', 'Envio cross-chain position discovery failed, falling back', { + chainIds: chainIds.join(','), + reason: getErrorMessage(error), + }); } } + logDataSourceEvent('position-markets', 'using per-network position discovery fallback', { + chainIds: chainIds.join(','), + }); return fetchPositionMarketsPerNetworkFallback(user, chainIds); }; diff --git a/src/data-sources/shared/market-target-rate-enrichment.ts b/src/data-sources/shared/market-target-rate-enrichment.ts new file mode 100644 index 00000000..a3c5ae22 --- /dev/null +++ b/src/data-sources/shared/market-target-rate-enrichment.ts @@ -0,0 +1,154 @@ +import { AdaptiveCurveIrmLib, MarketUtils } from '@morpho-org/blue-sdk'; +import type { Address } from 'viem'; +import type { CustomRpcUrls } from '@/stores/useCustomRpc'; +import { 
getChainScopedMarketKey } from '@/utils/marketIdentity'; +import type { SupportedNetworks } from '@/utils/networks'; +import { getClient } from '@/utils/rpc'; +import type { Market } from '@/utils/types'; +import { filterTokenBlacklistedMarkets } from './market-visibility'; + +const adaptiveCurveIrmAbi = [ + { + inputs: [{ name: 'id', type: 'bytes32' }], + name: 'rateAtTarget', + outputs: [{ name: '', type: 'int256' }], + stateMutability: 'view', + type: 'function', + }, +] as const; + +const TARGET_RATE_CHUNK_SIZE = 500; + +type TargetRateEnrichmentOptions = { + customRpcUrls?: CustomRpcUrls; +}; + +const normalizeRateAtTarget = (value: bigint | null | undefined): bigint => { + if (typeof value === 'bigint' && value > 0n) { + return value; + } + + return AdaptiveCurveIrmLib.INITIAL_RATE_AT_TARGET; +}; + +const parseStoredRateAtTarget = (value: string): bigint | null => { + try { + return BigInt(value); + } catch { + return null; + } +}; + +export const marketNeedsTargetRateEnrichment = (market: Market): boolean => { + const storedRateAtTarget = parseStoredRateAtTarget(market.state.rateAtTarget); + + return storedRateAtTarget == null || storedRateAtTarget <= 0n || market.state.apyAtTarget <= 0; +}; + +const applyTargetRate = (market: Market, rateAtTarget: bigint): Market => { + const normalizedRateAtTarget = normalizeRateAtTarget(rateAtTarget); + const nextRateAtTarget = normalizedRateAtTarget.toString(); + const nextApyAtTarget = MarketUtils.rateToApy(normalizedRateAtTarget); + + if (market.state.rateAtTarget === nextRateAtTarget && market.state.apyAtTarget === nextApyAtTarget) { + return market; + } + + return { + ...market, + state: { + ...market.state, + apyAtTarget: nextApyAtTarget, + rateAtTarget: nextRateAtTarget, + }, + }; +}; + +const enrichChainMarketsWithTargetRate = async ( + chainId: SupportedNetworks, + markets: Market[], + options: TargetRateEnrichmentOptions, +): Promise => { + if (markets.length === 0) { + return markets; + } + + const client = 
getClient(chainId, options.customRpcUrls?.[chainId]); + const enrichedMarkets: Market[] = []; + + for (let index = 0; index < markets.length; index += TARGET_RATE_CHUNK_SIZE) { + const marketBatch = markets.slice(index, index + TARGET_RATE_CHUNK_SIZE); + const results = await client.multicall({ + allowFailure: true, + contracts: marketBatch.map((market) => ({ + abi: adaptiveCurveIrmAbi, + address: market.irmAddress as Address, + args: [market.uniqueKey as `0x${string}`], + functionName: 'rateAtTarget', + })), + }); + + results.forEach((result, resultIndex) => { + const market = marketBatch[resultIndex]; + + if (!market) { + return; + } + + if (result.status !== 'success' || typeof result.result !== 'bigint') { + enrichedMarkets.push(market); + return; + } + + enrichedMarkets.push(applyTargetRate(market, result.result)); + }); + } + + return enrichedMarkets; +}; + +export const enrichMarketsWithTargetRate = async ( + markets: Market[], + options: TargetRateEnrichmentOptions = {}, +): Promise => { + const visibleMarkets = filterTokenBlacklistedMarkets(markets); + const marketsByChain = new Map(); + + for (const market of visibleMarkets) { + if (!marketNeedsTargetRateEnrichment(market)) { + continue; + } + + const chainMarkets = marketsByChain.get(market.morphoBlue.chain.id) ?? 
[]; + chainMarkets.push(market); + marketsByChain.set(market.morphoBlue.chain.id, chainMarkets); + } + + if (marketsByChain.size === 0) { + return markets; + } + + const chainResults = await Promise.allSettled( + Array.from(marketsByChain.entries()).map(async ([chainId, chainMarkets]) => ({ + chainId, + markets: await enrichChainMarketsWithTargetRate(chainId, chainMarkets, options), + })), + ); + + const enrichedByUniqueKey = new Map(); + + for (const result of chainResults) { + if (result.status !== 'fulfilled') { + continue; + } + + for (const market of result.value.markets) { + enrichedByUniqueKey.set(getChainScopedMarketKey(market.uniqueKey, market.morphoBlue.chain.id), market); + } + } + + return markets.map((market) => { + const marketKey = getChainScopedMarketKey(market.uniqueKey, market.morphoBlue.chain.id); + return enrichedByUniqueKey.get(marketKey) ?? market; + }); +}; diff --git a/src/data-sources/shared/token-metadata.ts b/src/data-sources/shared/token-metadata.ts index 3d523d47..6b73f024 100644 --- a/src/data-sources/shared/token-metadata.ts +++ b/src/data-sources/shared/token-metadata.ts @@ -8,6 +8,7 @@ import type { TokenInfo } from '@/utils/types'; const DEFAULT_TOKEN_DECIMALS = 18; const UNKNOWN_TOKEN_NAME = 'Unknown Token'; +const TOKEN_METADATA_ADDRESSES_PER_MULTICALL = 100; const resolvedTokenMetadataCache = new Map(); const pendingTokenMetadataCache = new Map>(); @@ -59,6 +60,16 @@ const createDeferredTokenInfo = (): DeferredTokenInfo => { return { promise, resolve }; }; +const chunkAddresses = (addresses: string[]): string[][] => { + const chunks: string[][] = []; + + for (let index = 0; index < addresses.length; index += TOKEN_METADATA_ADDRESSES_PER_MULTICALL) { + chunks.push(addresses.slice(index, index + TOKEN_METADATA_ADDRESSES_PER_MULTICALL)); + } + + return chunks; +}; + export const fetchTokenMetadataMap = async ( tokenRefs: { address: string; chainId: SupportedNetworks }[], customRpcUrls?: CustomRpcUrls, @@ -123,47 +134,62 @@ export 
const fetchTokenMetadataMap = async ( } try { - const contracts = uniqueAddresses.flatMap((address) => [ - { - abi: erc20Abi, - address: address as Address, - functionName: 'symbol' as const, - }, - { - abi: erc20Abi, - address: address as Address, - functionName: 'name' as const, - }, - { - abi: erc20Abi, - address: address as Address, - functionName: 'decimals' as const, - }, - ]); - - const results = await client.multicall({ - allowFailure: true, - contracts, - }); - - for (const [index, address] of uniqueAddresses.entries()) { - const symbolResult = results[index * 3]; - const nameResult = results[index * 3 + 1]; - const decimalsResult = results[index * 3 + 2]; - - const tokenInfo = createFallbackTokenInfo(address, { - decimals: - decimalsResult?.status === 'success' && typeof decimalsResult.result === 'number' - ? decimalsResult.result - : DEFAULT_TOKEN_DECIMALS, - name: nameResult?.status === 'success' && typeof nameResult.result === 'string' ? nameResult.result : UNKNOWN_TOKEN_NAME, - symbol: symbolResult?.status === 'success' && typeof symbolResult.result === 'string' ? 
symbolResult.result : 'Unknown', - }); - - const key = infoToKey(address, chainId); - resolvedTokenMetadataCache.set(key, tokenInfo); - metadataMap.set(key, tokenInfo); - deferredByKey.get(key)?.resolve(tokenInfo); + for (const addressChunk of chunkAddresses(uniqueAddresses)) { + try { + const contracts = addressChunk.flatMap((address) => [ + { + abi: erc20Abi, + address: address as Address, + functionName: 'symbol' as const, + }, + { + abi: erc20Abi, + address: address as Address, + functionName: 'name' as const, + }, + { + abi: erc20Abi, + address: address as Address, + functionName: 'decimals' as const, + }, + ]); + + const results = await client.multicall({ + allowFailure: true, + contracts, + }); + + for (const [index, address] of addressChunk.entries()) { + const symbolResult = results[index * 3]; + const nameResult = results[index * 3 + 1]; + const decimalsResult = results[index * 3 + 2]; + + const tokenInfo = createFallbackTokenInfo(address, { + decimals: + decimalsResult?.status === 'success' && typeof decimalsResult.result === 'number' + ? decimalsResult.result + : DEFAULT_TOKEN_DECIMALS, + name: + nameResult?.status === 'success' && typeof nameResult.result === 'string' + ? nameResult.result + : UNKNOWN_TOKEN_NAME, + symbol: symbolResult?.status === 'success' && typeof symbolResult.result === 'string' ? 
symbolResult.result : 'Unknown', + }); + + const key = infoToKey(address, chainId); + resolvedTokenMetadataCache.set(key, tokenInfo); + metadataMap.set(key, tokenInfo); + deferredByKey.get(key)?.resolve(tokenInfo); + } + } catch { + for (const address of addressChunk) { + const key = infoToKey(address, chainId); + const tokenInfo = createFallbackTokenInfo(address); + resolvedTokenMetadataCache.set(key, tokenInfo); + metadataMap.set(key, tokenInfo); + deferredByKey.get(key)?.resolve(tokenInfo); + } + } } } catch { for (const address of uniqueAddresses) { diff --git a/src/data-sources/subgraph/historical.ts b/src/data-sources/subgraph/historical.ts index a54ce04f..90f0f317 100644 --- a/src/data-sources/subgraph/historical.ts +++ b/src/data-sources/subgraph/historical.ts @@ -37,6 +37,16 @@ type SubgraphMarketHourlySnapshotQueryResponse = { }; // --- End Subgraph Specific Types --- +const safeParseFloat = (value: string | null | undefined): number => { + if (!value) { + return 0; + } + + const parsed = Number.parseFloat(value); + return Number.isFinite(parsed) ? parsed : 0; +}; +const UTILIZATION_SCALE = 1_000_000n; + // Transformation function (simplified) const transformSubgraphSnapshotsToHistoricalResult = ( snapshots: SubgraphMarketHourlySnapshot[], // Expect non-empty array here @@ -79,16 +89,22 @@ const transformSubgraphSnapshotsToHistoricalResult = ( x: timestamp, y: isNaN(borrowApyValue) ? 0 : borrowApyValue, }); - rates.apyAtTarget.push({ x: timestamp, y: 0 }); - rates.utilization.push({ x: timestamp, y: 0 }); + const supplyAssetsUsd = safeParseFloat(snapshot.totalDepositBalanceUSD); + const borrowAssetsUsd = safeParseFloat(snapshot.totalBorrowBalanceUSD); + const liquidityAssetsUsd = Math.max(0, supplyAssetsUsd - borrowAssetsUsd); const supplyNative = BigInt(snapshot.inputTokenBalance ?? '0'); const borrowNative = BigInt(snapshot.variableBorrowedTokenBalance ?? 
'0'); const liquidityNative = supplyNative - borrowNative; + const utilization = + supplyNative > 0n ? Number((borrowNative * UTILIZATION_SCALE) / supplyNative) / Number(UTILIZATION_SCALE) : 0; + + rates.apyAtTarget.push({ x: timestamp, y: 0 }); + rates.utilization.push({ x: timestamp, y: utilization }); - volumes.supplyAssetsUsd.push({ x: timestamp, y: 0 }); - volumes.borrowAssetsUsd.push({ x: timestamp, y: 0 }); - volumes.liquidityAssetsUsd.push({ x: timestamp, y: 0 }); + volumes.supplyAssetsUsd.push({ x: timestamp, y: supplyAssetsUsd }); + volumes.borrowAssetsUsd.push({ x: timestamp, y: borrowAssetsUsd }); + volumes.liquidityAssetsUsd.push({ x: timestamp, y: liquidityAssetsUsd }); volumes.supplyAssets.push({ x: timestamp, y: Number(supplyNative) }); volumes.borrowAssets.push({ x: timestamp, y: Number(borrowNative) }); diff --git a/src/data-sources/subgraph/market.ts b/src/data-sources/subgraph/market.ts index b21fda03..3eba7efe 100644 --- a/src/data-sources/subgraph/market.ts +++ b/src/data-sources/subgraph/market.ts @@ -4,6 +4,7 @@ import { formatBalance } from '@/utils/balance'; import type { SupportedNetworks } from '@/utils/networks'; import type { SubgraphMarket, SubgraphMarketQueryResponse, SubgraphMarketsQueryResponse, SubgraphToken } from '@/utils/subgraph-types'; import { getSubgraphUrl } from '@/utils/subgraph-urls'; +import { isForceUnwhitelisted } from '@/utils/markets'; import { blacklistTokens, findToken } from '@/utils/tokens'; import type { Market, MarketUsdPriceSource, MarketWarning } from '@/utils/types'; import { UNRECOGNIZED_COLLATERAL, UNRECOGNIZED_LOAN } from '@/utils/warnings'; @@ -97,7 +98,7 @@ const transformSubgraphMarketToMarket = ( uniqueKey: marketId, lltv, irmAddress: irmAddress as Address, - whitelisted: true, + whitelisted: !isForceUnwhitelisted(marketId), loanAsset, collateralAsset, state: { diff --git a/src/data-sources/user-position.ts b/src/data-sources/user-position.ts index 6f4f582e..e89ad742 100644 --- 
a/src/data-sources/user-position.ts +++ b/src/data-sources/user-position.ts @@ -1,24 +1,60 @@ import { hasEnvioIndexer, supportsMorphoApi } from '@/config/dataSources'; import { fetchEnvioUserPositionForMarket } from '@/data-sources/envio/positions'; +import { fetchMarketDetails } from '@/data-sources/market-details'; import { fetchMorphoUserPositionForMarket } from '@/data-sources/morpho-api/positions'; +import { getErrorMessage, logDataSourceEvent } from '@/data-sources/shared/source-debug'; import { fetchSubgraphUserPositionForMarket } from '@/data-sources/subgraph/positions'; +import type { CustomRpcUrls } from '@/stores/useCustomRpc'; import type { SupportedNetworks } from '@/utils/networks'; import type { MarketPosition } from '@/utils/types'; +const hydratePositionMarket = async ( + position: MarketPosition, + chainId: SupportedNetworks, + options: { + customRpcUrls?: CustomRpcUrls; + }, +): Promise => { + const hydratedMarket = await fetchMarketDetails(position.market.uniqueKey, chainId, { + customRpcUrls: options.customRpcUrls, + enrichHistoricalApys: false, + }).catch(() => null); + + if (!hydratedMarket) { + return position; + } + + return { + ...position, + market: hydratedMarket, + }; +}; + export const fetchUserPositionForMarket = async ( marketUniqueKey: string, userAddress: string, chainId: SupportedNetworks, + options: { + customRpcUrls?: CustomRpcUrls; + } = {}, ): Promise => { if (hasEnvioIndexer()) { try { - const envioPosition = await fetchEnvioUserPositionForMarket(marketUniqueKey, userAddress, chainId); + const envioPosition = await fetchEnvioUserPositionForMarket(marketUniqueKey, userAddress, chainId, options); if (envioPosition) { - return envioPosition; + logDataSourceEvent('user-position', 'using Envio position source', { + chainId, + marketUniqueKey, + }); + return hydratePositionMarket(envioPosition, chainId, options); } - } catch (envioError) { - console.error('Failed to fetch position via Envio:', envioError); + } catch (error) { + 
logDataSourceEvent('user-position', 'Envio position fetch failed, falling back', { + chainId, + marketUniqueKey, + reason: getErrorMessage(error), + }); } } @@ -27,12 +63,30 @@ export const fetchUserPositionForMarket = async ( const morphoPosition = await fetchMorphoUserPositionForMarket(marketUniqueKey, userAddress, chainId); if (morphoPosition) { - return morphoPosition; + logDataSourceEvent('user-position', 'using Morpho API fallback for position', { + chainId, + marketUniqueKey, + }); + return hydratePositionMarket(morphoPosition, chainId, options); } - } catch (morphoError) { - console.error('Failed to fetch position via Morpho API:', morphoError); + } catch (error) { + logDataSourceEvent('user-position', 'Morpho API position fetch failed, falling back to subgraph', { + chainId, + marketUniqueKey, + reason: getErrorMessage(error), + }); } } - return fetchSubgraphUserPositionForMarket(marketUniqueKey, userAddress, chainId); + logDataSourceEvent('user-position', 'using subgraph fallback for position', { + chainId, + marketUniqueKey, + }); + const subgraphPosition = await fetchSubgraphUserPositionForMarket(marketUniqueKey, userAddress, chainId); + + if (!subgraphPosition) { + return null; + } + + return hydratePositionMarket(subgraphPosition, chainId, options); }; diff --git a/src/hooks/queries/useMarketMetricsQuery.ts b/src/hooks/queries/useMarketMetricsQuery.ts index cf5a605b..86d2c026 100644 --- a/src/hooks/queries/useMarketMetricsQuery.ts +++ b/src/hooks/queries/useMarketMetricsQuery.ts @@ -121,8 +121,6 @@ const fetchAllMarketMetrics = async (params: MarketMetricsParams): Promise getFeedMetadataSnapshot(oracleMetadataMap), [oracleMetadataMap]); @@ -142,18 +142,18 @@ export function useFeedLastUpdatedByChain(chainId: SupportedNetworks | number | const hintFingerprint = useMemo(() => createHintsFingerprint(hintByAddress), [hintByAddress]); const query = useQuery({ - queryKey: ['feed-snapshot', chainId, addressFingerprint, hintFingerprint], - enabled: 
Boolean(chainId && publicClient && feedAddresses.length > 0), + queryKey: ['feed-snapshot', chainId, addressFingerprint, hintFingerprint, rpcConfigVersion], + enabled: Boolean(chainId && client && feedAddresses.length > 0), staleTime: FEED_REFRESH_INTERVAL_MS, refetchInterval: FEED_REFRESH_INTERVAL_MS, refetchOnWindowFocus: false, queryFn: async (): Promise => { - if (!publicClient) return {}; + if (!client) return {}; const snapshotByAddress: FeedSnapshotByAddress = {}; const addressChunks = chunkAddresses(feedAddresses); - const blockNumber = await publicClient.getBlockNumber(); - const block = await publicClient.getBlock({ blockNumber }); + const blockNumber = await client.getBlockNumber(); + const block = await client.getBlock({ blockNumber }); const queryBlockTimestamp = Number(block.timestamp); for (const addressChunk of addressChunks) { @@ -170,12 +170,12 @@ export function useFeedLastUpdatedByChain(chainId: SupportedNetworks | number | })); const [roundResults, decimalsResults] = await Promise.all([ - publicClient.multicall({ + client.multicall({ contracts: latestRoundContracts, allowFailure: true, blockNumber, }), - publicClient.multicall({ + client.multicall({ contracts: decimalsContracts, allowFailure: true, blockNumber, diff --git a/src/hooks/useFreshMarketsState.ts b/src/hooks/useFreshMarketsState.ts index 82631693..6aee6edb 100644 --- a/src/hooks/useFreshMarketsState.ts +++ b/src/hooks/useFreshMarketsState.ts @@ -1,21 +1,12 @@ import { useEffect, useMemo } from 'react'; import { useQuery, useQueryClient } from '@tanstack/react-query'; -import { usePublicClient } from 'wagmi'; -import morphoABI from '@/abis/morpho'; -import { getMorphoAddress } from '@/utils/morpho'; +import { useReadOnlyClient } from '@/hooks/useReadOnlyClient'; import type { SupportedNetworks } from '@/utils/networks'; +import { fetchMarketSnapshots, type MarketSnapshot } from '@/utils/positions'; import type { Market } from '@/utils/types'; const REFRESH_INTERVAL = 15_000; // 15 
seconds -type MarketSnapshot = { - totalSupplyAssets: string; - totalSupplyShares: string; - totalBorrowAssets: string; - totalBorrowShares: string; - liquidityAssets: string; -}; - /** * Hook to fetch fresh market states using multicall. * Works efficiently for both single and multiple markets. @@ -40,7 +31,7 @@ export const useFreshMarketsState = ( // Derive chainId from first market if not provided const effectiveChainId = chainId ?? (markets?.[0]?.morphoBlue.chain.id as SupportedNetworks | undefined); - const publicClient = usePublicClient({ chainId: effectiveChainId }); + const { client, rpcConfigVersion } = useReadOnlyClient(effectiveChainId); const queryClient = useQueryClient(); // Create stable query key from market unique keys @@ -53,7 +44,7 @@ export const useFreshMarketsState = ( [markets], ); - const queryKey = ['fresh-markets-state', effectiveChainId, marketKeys]; + const queryKey = ['fresh-markets-state', effectiveChainId, marketKeys, rpcConfigVersion]; const { data: snapshots, @@ -63,56 +54,17 @@ export const useFreshMarketsState = ( } = useQuery({ queryKey, queryFn: async () => { - if (!markets || markets.length === 0 || !effectiveChainId || !publicClient) { + if (!markets || markets.length === 0 || !effectiveChainId || !client) { return null; } - console.log(`Reading fresh state for ${markets.length} markets from chain...`); - - // Create multicall contracts for all markets - const contracts = markets.map((market) => ({ - address: getMorphoAddress(effectiveChainId) as `0x${string}`, - abi: morphoABI, - functionName: 'market' as const, - args: [market.uniqueKey as `0x${string}`], - })); - - // Use multicall to batch all market queries into a single RPC call - const results = await publicClient.multicall({ - contracts, - allowFailure: true, - }); - - console.log(`complete reading ${markets.length} market states`); - - // Process results into snapshots map - const snapshotsMap = new Map(); - - results.forEach((result, index) => { - const market = 
markets[index]; - if (result.status === 'success' && result.result) { - const data = result.result as readonly bigint[]; - const totalSupplyAssets = data[0]; - const totalSupplyShares = data[1]; - const totalBorrowAssets = data[2]; - const totalBorrowShares = data[3]; - const liquidityAssets = totalSupplyAssets - totalBorrowAssets; - - snapshotsMap.set(market.uniqueKey, { - totalSupplyAssets: totalSupplyAssets.toString(), - totalSupplyShares: totalSupplyShares.toString(), - totalBorrowAssets: totalBorrowAssets.toString(), - totalBorrowShares: totalBorrowShares.toString(), - liquidityAssets: liquidityAssets.toString(), - }); - } else { - console.warn(`Failed to fetch snapshot for market ${market.uniqueKey}`); - } - }); - - return snapshotsMap; + return fetchMarketSnapshots( + markets.map((market) => market.uniqueKey), + effectiveChainId, + client, + ); }, - enabled: !!markets && markets.length > 0 && !!effectiveChainId && !!publicClient, + enabled: !!markets && markets.length > 0 && !!effectiveChainId && !!client, staleTime: 0, // Always fetch fresh when requested gcTime: 20_000, // Keep in cache for 20 seconds refetchOnWindowFocus: false, @@ -137,7 +89,7 @@ export const useFreshMarketsState = ( if (!snapshots) return markets; return markets.map((market) => { - const snapshot = snapshots.get(market.uniqueKey); + const snapshot = snapshots.get(market.uniqueKey) as MarketSnapshot | undefined; if (!snapshot) return market; return { diff --git a/src/hooks/useMarketData.ts b/src/hooks/useMarketData.ts index 9fce5ba0..3a75d2d9 100644 --- a/src/hooks/useMarketData.ts +++ b/src/hooks/useMarketData.ts @@ -1,17 +1,15 @@ import { useMemo } from 'react'; import { useQuery } from '@tanstack/react-query'; -import { usePublicClient } from 'wagmi'; -import { useCustomRpcContext } from '@/components/providers/CustomRpcProvider'; import { fetchMarketDetails } from '@/data-sources/market-details'; import { useOracleDataQuery } from '@/hooks/queries/useOracleDataQuery'; +import { 
useReadOnlyClient } from '@/hooks/useReadOnlyClient'; import type { SupportedNetworks } from '@/utils/networks'; import { fetchMarketSnapshot } from '@/utils/positions'; import type { Market } from '@/utils/types'; export const useMarketData = (uniqueKey: string | undefined, network: SupportedNetworks | undefined) => { - const { customRpcUrls, rpcConfigVersion } = useCustomRpcContext(); + const { client, customRpcUrls, rpcConfigVersion } = useReadOnlyClient(network); const queryKey = ['marketData', uniqueKey, network, rpcConfigVersion]; - const publicClient = usePublicClient({ chainId: network }); const { getOracleData } = useOracleDataQuery(); const { data, isLoading, error, refetch } = useQuery({ @@ -21,7 +19,7 @@ export const useMarketData = (uniqueKey: string | undefined, network: SupportedN return null; } - if (!publicClient) { + if (!client) { console.error('Public client not available'); return null; } @@ -29,7 +27,7 @@ export const useMarketData = (uniqueKey: string | undefined, network: SupportedN // 1. 
Try fetching the on-chain market snapshot first let snapshot = null; try { - snapshot = await fetchMarketSnapshot(uniqueKey, network, publicClient); + snapshot = await fetchMarketSnapshot(uniqueKey, network, client); } catch (snapshotError) { console.error(`Error fetching market snapshot for ${uniqueKey}:`, snapshotError); // Snapshot fetch failed, will proceed to fallback fetch diff --git a/src/hooks/useReadOnlyClient.ts b/src/hooks/useReadOnlyClient.ts new file mode 100644 index 00000000..e8cd3c18 --- /dev/null +++ b/src/hooks/useReadOnlyClient.ts @@ -0,0 +1,23 @@ +import { useMemo } from 'react'; +import { useCustomRpcContext } from '@/components/providers/CustomRpcProvider'; +import { type SupportedNetworks, isSupportedChain } from '@/utils/networks'; +import { getClient } from '@/utils/rpc'; + +export const useReadOnlyClient = (chainId: SupportedNetworks | number | undefined) => { + const { customRpcUrls, rpcConfigVersion } = useCustomRpcContext(); + + const client = useMemo(() => { + if (chainId == null || !isSupportedChain(chainId)) { + return null; + } + + const supportedChainId = chainId as SupportedNetworks; + return getClient(supportedChainId, customRpcUrls[supportedChainId]); + }, [chainId, customRpcUrls, rpcConfigVersion]); + + return { + client, + customRpcUrls, + rpcConfigVersion, + }; +}; diff --git a/src/hooks/useUserPosition.ts b/src/hooks/useUserPosition.ts index 0addc249..41d2fd45 100644 --- a/src/hooks/useUserPosition.ts +++ b/src/hooks/useUserPosition.ts @@ -1,7 +1,8 @@ import { useQuery } from '@tanstack/react-query'; import type { Address } from 'viem'; -import { usePublicClient } from 'wagmi'; import { fetchUserPositionForMarket } from '@/data-sources/user-position'; +import { useReadOnlyClient } from '@/hooks/useReadOnlyClient'; +import { getChainScopedMarketKey } from '@/utils/marketIdentity'; import type { SupportedNetworks } from '@/utils/networks'; import { fetchPositionSnapshot } from '@/utils/positions'; import type { MarketPosition 
} from '@/utils/types'; @@ -19,10 +20,10 @@ import { useProcessedMarkets } from './useProcessedMarkets'; * @returns User position data, loading state, error state, and refetch function. */ const useUserPosition = (user: string | undefined, chainId: SupportedNetworks | undefined, marketKey: string | undefined) => { - const queryKey = ['userPosition', user, chainId, marketKey]; + const { client, customRpcUrls, rpcConfigVersion } = useReadOnlyClient(chainId); + const queryKey = ['userPosition', user, chainId, marketKey, rpcConfigVersion]; const { allMarkets: markets } = useProcessedMarkets(); - const publicClient = usePublicClient({ chainId }); const { data, @@ -37,7 +38,7 @@ const useUserPosition = (user: string | undefined, chainId: SupportedNetworks | return null; } - if (!publicClient) { + if (!client) { console.error('Public client not available'); return null; } @@ -45,7 +46,7 @@ const useUserPosition = (user: string | undefined, chainId: SupportedNetworks | // 1. Try fetching the on-chain snapshot first let snapshot = null; try { - snapshot = await fetchPositionSnapshot(marketKey, user as Address, chainId, undefined, publicClient); + snapshot = await fetchPositionSnapshot(marketKey, user as Address, chainId, undefined, client); } catch (snapshotError) { console.error(`Error fetching position snapshot for ${user} on market ${marketKey}:`, snapshotError); // Snapshot fetch failed, will proceed to fallback fetch @@ -55,7 +56,11 @@ const useUserPosition = (user: string | undefined, chainId: SupportedNetworks | if (snapshot) { // Snapshot succeeded, try to use local market data first - const market = markets?.find((m) => m.uniqueKey.toLowerCase() === marketKey.toLowerCase()); + const scopedMarketKey = getChainScopedMarketKey(marketKey, chainId); + const market = markets?.find( + (candidateMarket) => + getChainScopedMarketKey(candidateMarket.uniqueKey, candidateMarket.morphoBlue.chain.id) === scopedMarketKey, + ); if (market) { // Local market data found, construct 
position directly @@ -73,7 +78,9 @@ const useUserPosition = (user: string | undefined, chainId: SupportedNetworks | } else { // Local market data NOT found, need to fetch from fallback to get structure console.warn(`Local market data not found for ${marketKey}. Fetching from fallback source to combine with snapshot.`); - const fallbackPosition = await fetchUserPositionForMarket(marketKey, user, chainId); + const fallbackPosition = await fetchUserPositionForMarket(marketKey, user, chainId, { + customRpcUrls, + }); if (fallbackPosition) { // Fallback succeeded, combine with snapshot state @@ -95,7 +102,9 @@ const useUserPosition = (user: string | undefined, chainId: SupportedNetworks | } } else { // Snapshot failed, rely entirely on the fallback data source - finalPosition = await fetchUserPositionForMarket(marketKey, user, chainId); + finalPosition = await fetchUserPositionForMarket(marketKey, user, chainId, { + customRpcUrls, + }); } // If finalPosition has zero balances, it's still a valid position state from the snapshot or fallback return finalPosition; diff --git a/src/hooks/useUserPositions.ts b/src/hooks/useUserPositions.ts index d4c660c4..f0497b3c 100644 --- a/src/hooks/useUserPositions.ts +++ b/src/hooks/useUserPositions.ts @@ -1,6 +1,7 @@ import { useCallback } from 'react'; import { useQuery, useQueryClient } from '@tanstack/react-query'; import type { Address } from 'viem'; +import { useCustomRpcContext } from '@/components/providers/CustomRpcProvider'; import { fetchUserPositionMarkets } from '@/data-sources/position-markets'; import { getChainScopedMarketKey } from '@/utils/marketIdentity'; import { SupportedNetworks } from '@/utils/networks'; @@ -8,7 +9,6 @@ import { fetchLatestPositionSnapshotsWithOraclePrices, type PositionSnapshot, ty import { getClient } from '@/utils/rpc'; import type { Market, MarketPosition } from '@/utils/types'; import { useUserMarketsCache } from '@/stores/useUserMarketsCache'; -import { useCustomRpc } from 
'@/stores/useCustomRpc'; import { useProcessedMarkets } from './useProcessedMarkets'; // Type for market key and chain identifier @@ -52,7 +52,7 @@ const useUserPositions = (user: string | undefined, showEmpty = false, chainIds? const { allMarkets } = useProcessedMarkets(); const { getUserMarkets, batchAddUserMarkets } = useUserMarketsCache(user); - const { customRpcUrls } = useCustomRpc(); + const { customRpcUrls, rpcConfigVersion } = useCustomRpcContext(); // 1. Query for initial data: Fetch keys from sources, combine with cache, deduplicate const { @@ -101,7 +101,7 @@ const useUserPositions = (user: string | undefined, showEmpty = false, chainIds? isLoading: isLoadingEnhanced, isRefetching: isRefetchingEnhanced, } = useQuery({ - queryKey: positionKeys.enhanced(user, initialData), + queryKey: [...positionKeys.enhanced(user, initialData), rpcConfigVersion], queryFn: async () => { if (!initialData || !user) throw new Error('Assertion failed: initialData/user should be defined here.'); diff --git a/src/utils/positions.ts b/src/utils/positions.ts index ab369390..b347ac2f 100644 --- a/src/utils/positions.ts +++ b/src/utils/positions.ts @@ -322,38 +322,73 @@ export async function fetchMarketSnapshot( client: PublicClient, blockNumber?: number, ): Promise { + const snapshots = await fetchMarketSnapshots([marketId], chainId, client, blockNumber); + return snapshots.get(marketId) ?? null; +} + +/** + * Fetches market snapshots for one or more markets using multicall. 
+ * + * @param marketIds - Array of market unique IDs + * @param chainId - The chain ID of the network + * @param client - The viem PublicClient to use for the request + * @param blockNumber - The block number to fetch the market at (undefined for latest) + * @returns Map of marketId to market snapshot + */ +export async function fetchMarketSnapshots( + marketIds: string[], + chainId: number, + client: PublicClient, + blockNumber?: number, +): Promise> { + const snapshots = new Map(); + + if (marketIds.length === 0) { + return snapshots; + } + try { const isLatest = blockNumber === undefined; - - // Get the market data - const marketArray = (await client.readContract({ - address: getMorphoAddress(chainId as SupportedNetworks), - abi: morphoABI, - functionName: 'market', - args: [marketId as `0x${string}`], + const morphoAddress = getMorphoAddress(chainId as SupportedNetworks); + const results = await client.multicall({ + allowFailure: true, blockNumber: isLatest ? undefined : BigInt(blockNumber), - })) as readonly bigint[]; + contracts: marketIds.map((currentMarketId) => ({ + address: morphoAddress as `0x${string}`, + abi: morphoABI, + functionName: 'market' as const, + args: [currentMarketId as `0x${string}`], + })), + }); + + results.forEach((result, index) => { + const currentMarketId = marketIds[index]; + + if (!currentMarketId || result.status !== 'success' || !result.result) { + return; + } - // Convert array to market object - const market = arrayToMarket(marketArray); + const market = arrayToMarket(result.result as readonly bigint[]); + const liquidityAssets = market.totalSupplyAssets - market.totalBorrowAssets; - const liquidityAssets = market.totalSupplyAssets - market.totalBorrowAssets; + snapshots.set(currentMarketId, { + totalSupplyAssets: market.totalSupplyAssets.toString(), + totalSupplyShares: market.totalSupplyShares.toString(), + totalBorrowAssets: market.totalBorrowAssets.toString(), + totalBorrowShares: market.totalBorrowShares.toString(), + 
liquidityAssets: liquidityAssets.toString(), + }); + }); - return { - totalSupplyAssets: market.totalSupplyAssets.toString(), - totalSupplyShares: market.totalSupplyShares.toString(), - totalBorrowAssets: market.totalBorrowAssets.toString(), - totalBorrowShares: market.totalBorrowShares.toString(), - liquidityAssets: liquidityAssets.toString(), - }; + return snapshots; } catch (error) { - console.error('Error reading market:', { - marketId, + console.error('Error reading markets:', { + marketIds, chainId, blockNumber, error, }); - return null; + return snapshots; } } From f124ee855aafe84788895aa47b7aa5acaa86217f Mon Sep 17 00:00:00 2001 From: antoncoding Date: Sat, 14 Mar 2026 18:58:42 +0800 Subject: [PATCH 3/5] chore: more codechange --- AGENTS.md | 2 + src/data-sources/envio/events.ts | 27 +- src/data-sources/envio/historical.ts | 18 +- src/data-sources/envio/market-participants.ts | 11 +- src/data-sources/envio/market.ts | 69 +++-- src/data-sources/envio/transactions.ts | 13 +- src/data-sources/envio/utils.ts | 8 + src/data-sources/market-activity.ts | 6 +- src/data-sources/market-catalog.ts | 79 ++++-- src/data-sources/market-details.ts | 9 +- src/data-sources/morpho-api/market-borrows.ts | 2 + .../morpho-api/market-liquidations.ts | 3 +- .../morpho-api/market-supplies.ts | 2 + src/data-sources/morpho-api/market.ts | 20 +- src/data-sources/position-markets.ts | 33 ++- .../shared/historical-chain-context.ts | 29 ++- .../shared/market-rate-enrichment.ts | 4 +- src/data-sources/shared/market-usd.ts | 29 ++- src/data-sources/shared/token-metadata.ts | 26 +- src/data-sources/shared/token-prices.ts | 241 ++++++++++++++++++ src/data-sources/subgraph/historical.ts | 1 - src/data-sources/user-position.ts | 67 ++++- .../components/borrows-table.tsx | 2 +- .../components/supplies-table.tsx | 2 +- .../components/table/market-row-detail.tsx | 10 +- .../components/table/market-table-body.tsx | 3 + .../components/table/market-table-utils.tsx | 4 +- 
src/graphql/envio-queries.ts | 21 ++ src/graphql/morpho-api-queries.ts | 9 +- src/hooks/useTokenPrices.ts | 173 +------------ src/hooks/useUserPosition.ts | 19 +- src/utils/tokenCatalog.ts | 13 +- 32 files changed, 659 insertions(+), 296 deletions(-) create mode 100644 src/data-sources/shared/token-prices.ts diff --git a/AGENTS.md b/AGENTS.md index a5034d4a..84b5fd65 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -174,6 +174,8 @@ When touching transaction and position flows, validation MUST include all releva 38. **RPC configuration reactivity integrity**: any query or cache that depends on the active custom RPC selection must key or invalidate at the exact RPC-dependent layer (for example snapshot/enrichment queries), while RPC-independent discovery queries must not churn on RPC changes. Custom-RPC switching must not leave position or market state pinned to data fetched through the previous endpoint. 39. **Sparse market contract parity**: any fallback or single-entity market/position path that starts from sparse source data must hydrate missing market fields through the shared market-detail/catalog enrichment chokepoints before the result reaches shared UI consumers. Do not let raw fallback markets bypass shared USD, target-rate, blacklist, or chain-scoped identity normalization. 40. **Indexer market pre-hydration integrity**: shared market-catalog/indexer adapters must exclude structurally invalid markets (for example zero-address IRM or collateral token, plus local blacklist gates) before token-metadata hydration or enrichment begins, and source logs must distinguish raw fetch completion from downstream enrichment so slow stages are attributable. +41. **Pagination completeness integrity**: shared pagination utilities must validate positive page sizes/non-negative limits, and any paged market/participant/history source must either fetch complete results or fail closed to fallback. 
Do not silently cap user-visible datasets at arbitrary ceilings or treat incomplete/invalid pages as successful partial data. +42. **Indexed empty-result integrity**: shared indexer-backed market/position/transaction adapters must distinguish a legitimate empty domain result from an internal hydration/filtering failure. Empty arrays/zero counts are authoritative only when the adapter completed without downstream mapping, metadata, or market-hydration errors; otherwise throw and let the shared fallback layer decide. ### REQUIRED: Regression Rule Capture diff --git a/src/data-sources/envio/events.ts b/src/data-sources/envio/events.ts index 97b97060..d7196920 100644 --- a/src/data-sources/envio/events.ts +++ b/src/data-sources/envio/events.ts @@ -1,5 +1,6 @@ import { envioBorrowEventsQuery, + envioLatestBorrowRateUpdateBeforeQuery, envioBorrowRateUpdatesQuery, envioLiquidationsQuery, envioRepayEventsQuery, @@ -13,7 +14,7 @@ import { envioGraphqlFetcher } from './fetchers'; import { fetchAllEnvioPages } from './utils'; const ENVIO_EVENTS_PAGE_SIZE = 500; -const ENVIO_EVENTS_MAX_ITEMS = 1000; +const ENVIO_EVENTS_MAX_ITEMS = Number.MAX_SAFE_INTEGER; const ENVIO_EVENTS_TIMEOUT_MS = 15_000; export type EnvioLoanEventRow = { @@ -266,3 +267,27 @@ export const fetchEnvioBorrowRateUpdates = async ({ pageSize: ENVIO_EVENTS_PAGE_SIZE, }); }; + +export const fetchLatestEnvioBorrowRateUpdateBefore = async ({ + chainId, + marketId, + timestampLte, +}: { + chainId: SupportedNetworks; + marketId: string; + timestampLte: number; +}): Promise => { + const response = await envioGraphqlFetcher( + envioLatestBorrowRateUpdateBeforeQuery, + { + chainId, + marketId: marketId.toLowerCase(), + timestampLte, + }, + { + timeoutMs: ENVIO_EVENTS_TIMEOUT_MS, + }, + ); + + return response.data?.AdaptiveCurveIrm_BorrowRateUpdate?.[0] ?? 
null; +}; diff --git a/src/data-sources/envio/historical.ts b/src/data-sources/envio/historical.ts index 3bddd023..4a06443b 100644 --- a/src/data-sources/envio/historical.ts +++ b/src/data-sources/envio/historical.ts @@ -3,7 +3,7 @@ import { type Address, formatUnits } from 'viem'; import morphoAbi from '@/abis/morpho'; import { fetchMarketDetails } from '@/data-sources/market-details'; import type { HistoricalDataSuccessResult } from '@/data-sources/morpho-api/historical'; -import { fetchEnvioBorrowRateUpdates } from '@/data-sources/envio/events'; +import { fetchEnvioBorrowRateUpdates, fetchLatestEnvioBorrowRateUpdateBefore } from '@/data-sources/envio/events'; import type { CustomRpcUrls } from '@/stores/useCustomRpc'; import type { BlockWithTimestamp } from '@/utils/blockEstimation'; import { getMorphoAddress } from '@/utils/morpho'; @@ -198,16 +198,18 @@ const buildHistoricalResult = ({ loanAssetPrice, market, rateUpdates, + seedRateAtTarget, }: { historicalStates: { state: HistoricalMarketState; timestamp: number }[]; loanAssetDecimals: number; loanAssetPrice: number; market: NonNullable>>; rateUpdates: Awaited>; + seedRateAtTarget: bigint; }): HistoricalDataSuccessResult => { const result = buildEmptyResult(); const sortedUpdates = [...rateUpdates].sort((left, right) => normalizeEnvioTimestamp(left.timestamp) - normalizeEnvioTimestamp(right.timestamp)); - let rateAtTarget = normalizeRateAtTarget(market.state.rateAtTarget); + let rateAtTarget = seedRateAtTarget; let updateIndex = 0; for (const historicalPoint of historicalStates) { @@ -295,7 +297,7 @@ export const fetchEnvioMarketHistoricalData = async ( return null; } - const [historicalStates, rateUpdates] = await Promise.all([ + const [historicalStates, rateUpdates, latestRateUpdateBeforeWindow] = await Promise.all([ fetchHistoricalStates({ blocks: chainContext.historicalBlocks, chainId: network, @@ -307,7 +309,12 @@ export const fetchEnvioMarketHistoricalData = async ( marketId, timestampGte: 
options.startTimestamp, timestampLte: options.endTimestamp, - }).catch(() => []), + }), + fetchLatestEnvioBorrowRateUpdateBefore({ + chainId: network, + marketId, + timestampLte: options.startTimestamp, + }), ]); if (historicalStates.length === 0) { @@ -320,5 +327,8 @@ export const fetchEnvioMarketHistoricalData = async ( loanAssetPrice: deriveLoanAssetPrice(market), market, rateUpdates, + seedRateAtTarget: latestRateUpdateBeforeWindow + ? normalizeRateAtTarget(normalizeEnvioString(latestRateUpdateBeforeWindow.rateAtTarget)) + : normalizeRateAtTarget(market.state.rateAtTarget), }); }; diff --git a/src/data-sources/envio/market-participants.ts b/src/data-sources/envio/market-participants.ts index 74f5c2d8..ff65d141 100644 --- a/src/data-sources/envio/market-participants.ts +++ b/src/data-sources/envio/market-participants.ts @@ -6,7 +6,7 @@ import { envioGraphqlFetcher } from './fetchers'; import { fetchAllEnvioPages, normalizeEnvioString } from './utils'; const ENVIO_PARTICIPANTS_PAGE_SIZE = 500; -const ENVIO_PARTICIPANTS_MAX_ITEMS = 1000; +const ENVIO_PARTICIPANTS_MAX_ITEMS = Number.MAX_SAFE_INTEGER; const ENVIO_PARTICIPANTS_TIMEOUT_MS = 15_000; type EnvioSupplierRow = { @@ -85,7 +85,7 @@ export const fetchEnvioMarketSuppliers = async ( _eq: marketId.toLowerCase(), }, supplyShares: { - _gt: minShares, + _gte: minShares, }, }; @@ -121,7 +121,7 @@ export const fetchEnvioMarketBorrowers = async ( ): Promise => { const where = { borrowShares: { - _gt: minShares, + _gte: minShares, }, chainId: { _eq: chainId, @@ -147,10 +147,7 @@ export const fetchEnvioMarketBorrowers = async ( ]); if (!market) { - return { - items: [], - totalCount: 0, - }; + throw new Error(`Failed to hydrate Envio market ${marketId} on chain ${chainId} for borrower mapping`); } const items: MarketBorrower[] = borrowers.map((borrower) => { diff --git a/src/data-sources/envio/market.ts b/src/data-sources/envio/market.ts index daf0a5a2..0bcfdb8b 100644 --- a/src/data-sources/envio/market.ts +++ 
b/src/data-sources/envio/market.ts @@ -36,6 +36,7 @@ type EnvioMarketsResponse = { }; const ENVIO_MARKETS_PAGE_SIZE = 1000; +const ENVIO_MARKETS_PAGE_BATCH_SIZE = 4; const ENVIO_MARKETS_TIMEOUT_MS = 20_000; const normalizeAddress = (value: string | number | null | undefined): Address => { @@ -67,11 +68,11 @@ const normalizeRateAtTarget = (value: string | number | null | undefined): bigin } }; -const toFallbackTokenInfo = (address: string): TokenInfo => { +const toFallbackTokenInfo = (address: string, chainId: SupportedNetworks): TokenInfo => { return { address, decimals: 18, - id: address, + id: infoToKey(address, chainId), name: 'Unknown Token', symbol: 'Unknown', }; @@ -89,6 +90,7 @@ const withVisibleMarketsFilter = (where: Record): Record): Record { - return normalizeAddress(market.collateralToken) === zeroAddress || normalizeAddress(market.irm) === zeroAddress; + return ( + normalizeAddress(market.loanToken) === zeroAddress || + normalizeAddress(market.collateralToken) === zeroAddress || + normalizeAddress(market.irm) === zeroAddress + ); }; const fetchEnvioMarketsPage = async ({ @@ -145,13 +151,14 @@ const buildEnvioMarket = (market: EnvioMarketRow, tokenMetadataMap: Map => { const rows: EnvioMarketRow[] = []; - for (let offset = 0; ; offset += ENVIO_MARKETS_PAGE_SIZE) { - const page = await fetchEnvioMarketsPage({ - limit: ENVIO_MARKETS_PAGE_SIZE, - offset, - where: { - chainId: { - _in: chainIds, - }, - }, - }); - if (page.length === 0) break; + for (let offset = 0; ; offset += ENVIO_MARKETS_PAGE_SIZE * ENVIO_MARKETS_PAGE_BATCH_SIZE) { + const offsets = Array.from({ length: ENVIO_MARKETS_PAGE_BATCH_SIZE }, (_, index) => offset + index * ENVIO_MARKETS_PAGE_SIZE); + const pages = await Promise.all( + offsets.map((currentOffset) => + fetchEnvioMarketsPage({ + limit: ENVIO_MARKETS_PAGE_SIZE, + offset: currentOffset, + where: { + chainId: { + _in: chainIds, + }, + }, + }), + ), + ); + + let reachedEnd = false; - rows.push(...page); + for (const page of pages) { + 
if (page.length === 0) { + reachedEnd = true; + break; + } + + rows.push(...page); + + if (page.length < ENVIO_MARKETS_PAGE_SIZE) { + reachedEnd = true; + break; + } + } - if (page.length < ENVIO_MARKETS_PAGE_SIZE) { + if (reachedEnd) { break; } } diff --git a/src/data-sources/envio/transactions.ts b/src/data-sources/envio/transactions.ts index 5a275b1c..1fe89faf 100644 --- a/src/data-sources/envio/transactions.ts +++ b/src/data-sources/envio/transactions.ts @@ -3,6 +3,7 @@ import { fetchMarketDetails } from '@/data-sources/market-details'; import { fetchEnvioMarketsByKeys } from '@/data-sources/envio/market'; import { getChainScopedMarketKey } from '@/utils/marketIdentity'; import type { SupportedNetworks } from '@/utils/networks'; +import { infoToKey } from '@/utils/tokens'; import { type UserTransaction, UserTxTypes } from '@/utils/types'; import { fetchEnvioBorrowRows, @@ -107,18 +108,24 @@ const matchesAssetFilter = async ({ } } + if (marketMap.size !== uniqueMarketIds.length) { + throw new Error( + `Failed to hydrate ${uniqueMarketIds.length - marketMap.size} Envio transaction markets for asset filtering on chain ${chainId}`, + ); + } + return transactions.filter((transaction) => { const market = marketMap.get(transaction.data.market.uniqueKey.toLowerCase()); - if (!market) { - return false; + throw new Error(`Missing hydrated market for Envio transaction ${transaction.hash} on chain ${chainId}`); } const isCollateralTransaction = transaction.type === UserTxTypes.MarketSupplyCollateral || transaction.type === UserTxTypes.MarketWithdrawCollateral; const relevantAsset = isCollateralTransaction ? 
market.collateralAsset.address : market.loanAsset.address; + const canonicalAssetId = infoToKey(relevantAsset, chainId); - return normalizedAssetIds.has(relevantAsset.toLowerCase()); + return normalizedAssetIds.has(relevantAsset.toLowerCase()) || normalizedAssetIds.has(canonicalAssetId); }); }; diff --git a/src/data-sources/envio/utils.ts b/src/data-sources/envio/utils.ts index b3a1c8fd..c80daa04 100644 --- a/src/data-sources/envio/utils.ts +++ b/src/data-sources/envio/utils.ts @@ -23,6 +23,14 @@ export const fetchAllEnvioPages = async ({ maxItems?: number; pageSize?: number; }): Promise => { + if (!Number.isInteger(pageSize) || pageSize <= 0) { + throw new Error(`Invalid Envio page size: ${pageSize}. Expected a positive integer.`); + } + + if (!Number.isFinite(maxItems) || maxItems < 0) { + throw new Error(`Invalid Envio maxItems: ${maxItems}. Expected a non-negative number.`); + } + const items: T[] = []; for (let offset = 0; offset < maxItems; offset += pageSize) { diff --git a/src/data-sources/market-activity.ts b/src/data-sources/market-activity.ts index 6a69a9f1..723ebdb6 100644 --- a/src/data-sources/market-activity.ts +++ b/src/data-sources/market-activity.ts @@ -36,7 +36,7 @@ export const fetchMarketSupplies = async ( if (supportsMorphoApi(network)) { try { - return await fetchMorphoMarketSupplies(marketId, minAssets, pageSize, skip); + return await fetchMorphoMarketSupplies(marketId, network, minAssets, pageSize, skip); } catch (error) { logDataSourceEvent('market-supplies', 'Morpho API supplies fetch failed, falling back to subgraph', { chainId: network, @@ -75,7 +75,7 @@ export const fetchMarketBorrows = async ( if (supportsMorphoApi(network)) { try { - return await fetchMorphoMarketBorrows(marketId, minAssets, pageSize, skip); + return await fetchMorphoMarketBorrows(marketId, network, minAssets, pageSize, skip); } catch (error) { logDataSourceEvent('market-borrows', 'Morpho API borrows fetch failed, falling back to subgraph', { chainId: network, @@ 
-110,7 +110,7 @@ export const fetchMarketLiquidations = async ( if (supportsMorphoApi(network)) { try { - return await fetchMorphoMarketLiquidations(marketId); + return await fetchMorphoMarketLiquidations(marketId, network); } catch (error) { logDataSourceEvent('market-liquidations', 'Morpho API liquidations fetch failed, falling back to subgraph', { chainId: network, diff --git a/src/data-sources/market-catalog.ts b/src/data-sources/market-catalog.ts index 2597537d..8f315e54 100644 --- a/src/data-sources/market-catalog.ts +++ b/src/data-sources/market-catalog.ts @@ -12,6 +12,25 @@ import { ALL_SUPPORTED_NETWORKS, type SupportedNetworks } from '@/utils/networks import type { Market } from '@/utils/types'; const MARKET_ENRICHMENT_TIMEOUT_MS = 8_000; +const ENVIO_MARKET_CATALOG_TIMEOUT_MS = 12_000; + +const withTimeout = async (promise: Promise, timeoutMs: number, label: string): Promise => { + let timeoutHandle: ReturnType | null = null; + + const timeoutPromise = new Promise((_, reject) => { + timeoutHandle = globalThis.setTimeout(() => { + reject(new Error(`${label} timed out after ${timeoutMs}ms`)); + }, timeoutMs); + }); + + try { + return await Promise.race([promise, timeoutPromise]); + } finally { + if (timeoutHandle) { + globalThis.clearTimeout(timeoutHandle); + } + } +}; const enrichCatalogMarkets = async (markets: Market[], customRpcUrls?: CustomRpcUrls): Promise => { const marketsWithTargetRate = await enrichMarketsWithTargetRate(markets, { @@ -43,22 +62,36 @@ const getMissingChainIds = (chainIds: SupportedNetworks[], markets: Market[]): S return chainIds.filter((chainId) => !coveredChainIds.has(chainId)); }; -const fetchMarketsPerNetworkFallback = async (chainIds: SupportedNetworks[]): Promise => { - const results = await Promise.allSettled( - chainIds.map(async (network) => { - if (supportsMorphoApi(network)) { - try { - return await fetchMorphoMarkets(network); - } catch { - return fetchSubgraphMarkets(network); - } +const fetchMarketsForNetwork = async 
(network: SupportedNetworks): Promise => { + logDataSourceEvent('market-catalog', 'fetching fallback markets for chain', { + chainId: network, + primary: supportsMorphoApi(network) ? 'morpho' : 'subgraph', + }); + + if (supportsMorphoApi(network)) { + try { + return await fetchMorphoMarkets(network); + } catch (morphoError) { + try { + return await fetchSubgraphMarkets(network); + } catch (subgraphError) { + throw new Error( + `Failed to fetch markets for chain ${network}: Morpho API failed (${getErrorMessage(morphoError)}); Subgraph failed (${getErrorMessage(subgraphError)})`, + ); } + } + } - return fetchSubgraphMarkets(network); - }), - ); + try { + return await fetchSubgraphMarkets(network); + } catch (subgraphError) { + throw new Error(`Failed to fetch markets for chain ${network}: Subgraph failed (${getErrorMessage(subgraphError)})`); + } +}; - return filterTokenBlacklistedMarkets(results.flatMap((result) => (result.status === 'fulfilled' ? result.value : []))); +const fetchMarketsPerNetworkFallback = async (chainIds: SupportedNetworks[]): Promise => { + const results = await Promise.all(chainIds.map((network) => fetchMarketsForNetwork(network))); + return filterTokenBlacklistedMarkets(results.flat()); }; export const fetchMarketCatalog = async ( @@ -71,10 +104,17 @@ export const fetchMarketCatalog = async ( if (hasEnvioIndexer()) { try { - const envioFetchStartedAt = Date.now(); - const envioMarkets = await fetchEnvioMarkets(chainIds, { - customRpcUrls, + logDataSourceEvent('market-catalog', 'fetching Envio market catalog', { + chainIds: chainIds.join(','), }); + const envioFetchStartedAt = Date.now(); + const envioMarkets = await withTimeout( + fetchEnvioMarkets(chainIds, { + customRpcUrls, + }), + ENVIO_MARKET_CATALOG_TIMEOUT_MS, + 'Envio market catalog', + ); const envioFetchDurationMs = Date.now() - envioFetchStartedAt; const missingChainIds = getMissingChainIds(chainIds, envioMarkets); @@ -91,7 +131,7 @@ export const fetchMarketCatalog = async ( }); } - 
logDataSourceEvent('market-catalog', 'Envio fetch completed with incomplete coverage; falling back for missing chains only', { + logDataSourceEvent('market-catalog', 'Envio fetch completed with missing chains; falling back for those chains', { requestedChainIds: chainIds.join(','), coveredChainIds: [...new Set(envioMarkets.map((market) => market.morphoBlue.chain.id))].join(','), missingChainIds: missingChainIds.join(','), @@ -104,6 +144,7 @@ export const fetchMarketCatalog = async ( if (mergedMarkets.length > 0) { logDataSourceEvent('market-catalog', 'merged Envio with fallback markets', { + fallbackChainIds: missingChainIds.join(','), fallbackCount: fallbackMarkets.length, totalCount: mergedMarkets.length, }); @@ -145,8 +186,8 @@ export const fetchMarketCatalog = async ( } if (subgraphOnlyChainIds.length > 0) { - const subgraphResults = await Promise.allSettled(subgraphOnlyChainIds.map((network) => fetchSubgraphMarkets(network))); - markets.push(...filterTokenBlacklistedMarkets(subgraphResults.flatMap((result) => (result.status === 'fulfilled' ? result.value : [])))); + const subgraphMarkets = await Promise.all(subgraphOnlyChainIds.map((network) => fetchSubgraphMarkets(network))); + markets.push(...filterTokenBlacklistedMarkets(subgraphMarkets.flat())); logDataSourceEvent('market-catalog', 'used subgraph fallback for non-Morpho chains', { chainIds: subgraphOnlyChainIds.join(','), diff --git a/src/data-sources/market-details.ts b/src/data-sources/market-details.ts index d82a725d..9f578597 100644 --- a/src/data-sources/market-details.ts +++ b/src/data-sources/market-details.ts @@ -95,17 +95,20 @@ export const fetchMarketDetails = async ( return null; } - const [marketWithUsd] = await fillMissingMarketUsdValues([baseMarket]); + const [marketWithUsd] = await fillMissingMarketUsdValues([baseMarket]).catch(() => [baseMarket]); baseMarket = marketWithUsd ?? 
baseMarket; + const [marketWithTargetRate] = await enrichMarketsWithTargetRate([baseMarket], { customRpcUrls, - }); + }).catch(() => [baseMarket]); baseMarket = marketWithTargetRate ?? baseMarket; if (!enrichHistoricalApys) { return baseMarket; } - const [enrichedMarket] = await enrichMarketsWithHistoricalApysWithinTimeout([baseMarket], MARKET_ENRICHMENT_TIMEOUT_MS, customRpcUrls); + const [enrichedMarket] = await enrichMarketsWithHistoricalApysWithinTimeout([baseMarket], MARKET_ENRICHMENT_TIMEOUT_MS, customRpcUrls).catch( + () => [baseMarket], + ); return enrichedMarket ?? baseMarket; }; diff --git a/src/data-sources/morpho-api/market-borrows.ts b/src/data-sources/morpho-api/market-borrows.ts index 16a67929..79ee0dcd 100644 --- a/src/data-sources/morpho-api/market-borrows.ts +++ b/src/data-sources/morpho-api/market-borrows.ts @@ -40,11 +40,13 @@ type MorphoAPIBorrowsResponse = { */ export const fetchMorphoMarketBorrows = async ( marketId: string, + chainId: number, minAssets = '0', first = 8, skip = 0, ): Promise => { const variables = { + chainId, uniqueKey: marketId, minAssets, first, diff --git a/src/data-sources/morpho-api/market-liquidations.ts b/src/data-sources/morpho-api/market-liquidations.ts index 1feef13d..e7b926a9 100644 --- a/src/data-sources/morpho-api/market-liquidations.ts +++ b/src/data-sources/morpho-api/market-liquidations.ts @@ -28,8 +28,9 @@ type MorphoAPILiquidationsResponse = { * @param marketId The unique key or ID of the market. * @returns A promise resolving to an array of unified MarketLiquidationTransaction objects. 
*/ -export const fetchMorphoMarketLiquidations = async (marketId: string): Promise => { +export const fetchMorphoMarketLiquidations = async (marketId: string, chainId: number): Promise => { const variables = { + chainId, uniqueKey: marketId, // Morpho API query might not need first/skip for liquidations, adjust if needed }; diff --git a/src/data-sources/morpho-api/market-supplies.ts b/src/data-sources/morpho-api/market-supplies.ts index a101dea2..541063b9 100644 --- a/src/data-sources/morpho-api/market-supplies.ts +++ b/src/data-sources/morpho-api/market-supplies.ts @@ -41,11 +41,13 @@ type MorphoAPISuppliesResponse = { */ export const fetchMorphoMarketSupplies = async ( marketId: string, + chainId: number, minAssets = '0', first = 8, skip = 0, ): Promise => { const variables = { + chainId, uniqueKey: marketId, // Ensure this matches the variable name in the query minAssets, first, diff --git a/src/data-sources/morpho-api/market.ts b/src/data-sources/morpho-api/market.ts index 8fe858e5..cb6c58b7 100644 --- a/src/data-sources/morpho-api/market.ts +++ b/src/data-sources/morpho-api/market.ts @@ -72,7 +72,7 @@ const fetchMorphoMarketsPageForChains = async ( chainIds: SupportedNetworks[], skip: number, pageSize: number, -): Promise => { +): Promise => { const variables = { first: pageSize, skip, @@ -86,8 +86,7 @@ const fetchMorphoMarketsPageForChains = async ( }); if (!response || !response.data?.markets?.items || !response.data.markets.pageInfo) { - console.warn(`[Markets] Skipping failed page at skip=${skip} for chains ${chainIds.join(',')}`); - return null; + throw new Error(`Morpho markets page is incomplete at skip=${skip} for chains ${chainIds.join(',')}`); } const { items, pageInfo } = response.data.markets; @@ -109,22 +108,13 @@ export const fetchMorphoMarketsMultiChain = async (chainIds: SupportedNetworks[] const firstPage = await fetchMorphoMarketsPageForChains(chainIds, 0, pageSize); - if (!firstPage) { - return []; - } - allMarkets.push(...firstPage.items); 
const firstPageCount = firstPage.items.length; const totalCount = firstPage.totalCount; if (firstPageCount === 0 && totalCount > 0) { - console.warn('Received 0 items in the first page, but total count is positive. Returning first-page result only.'); - return allMarkets.filter( - (market) => - !blacklistTokens.includes(market.collateralAsset?.address.toLowerCase() ?? '') && - !blacklistTokens.includes(market.loanAsset?.address.toLowerCase() ?? ''), - ); + throw new Error('Morpho markets first page returned zero items despite a positive total count.'); } const remainingOffsets: number[] = []; @@ -142,9 +132,7 @@ export const fetchMorphoMarketsMultiChain = async (chainIds: SupportedNetworks[] if (settledPage.status === 'rejected') { throw settledPage.reason; } - if (settledPage.value) { - successfulPages.push(settledPage.value); - } + successfulPages.push(settledPage.value); } successfulPages.forEach((page) => { diff --git a/src/data-sources/position-markets.ts b/src/data-sources/position-markets.ts index 96cb2e37..8aaf23d1 100644 --- a/src/data-sources/position-markets.ts +++ b/src/data-sources/position-markets.ts @@ -11,6 +11,26 @@ type PositionMarket = { chainId: number; }; +const ENVIO_POSITION_DISCOVERY_TIMEOUT_MS = 12_000; + +const withTimeout = async (promise: Promise, timeoutMs: number, label: string): Promise => { + let timeoutHandle: ReturnType | null = null; + + const timeoutPromise = new Promise((_, reject) => { + timeoutHandle = globalThis.setTimeout(() => { + reject(new Error(`${label} timed out after ${timeoutMs}ms`)); + }, timeoutMs); + }); + + try { + return await Promise.race([promise, timeoutPromise]); + } finally { + if (timeoutHandle) { + globalThis.clearTimeout(timeoutHandle); + } + } +}; + const dedupePositionMarkets = (markets: PositionMarket[]): PositionMarket[] => { const uniqueMarkets = new Map(); @@ -49,9 +69,20 @@ export const fetchUserPositionMarkets = async ( user: string, chainIds: SupportedNetworks[] = ALL_SUPPORTED_NETWORKS, ): 
Promise => { + logDataSourceEvent('position-markets', 'fetching user position markets', { + chainIds: chainIds.join(','), + hasEnvioIndexer: hasEnvioIndexer(), + }); + if (hasEnvioIndexer()) { try { - const positionMarkets = dedupePositionMarkets(await fetchEnvioUserPositionMarkets(user, chainIds)); + const positionMarkets = dedupePositionMarkets( + await withTimeout( + fetchEnvioUserPositionMarkets(user, chainIds), + ENVIO_POSITION_DISCOVERY_TIMEOUT_MS, + 'Envio position discovery', + ), + ); logDataSourceEvent('position-markets', 'using Envio cross-chain position discovery', { chainIds: chainIds.join(','), count: positionMarkets.length, diff --git a/src/data-sources/shared/historical-chain-context.ts b/src/data-sources/shared/historical-chain-context.ts index 04060fbb..5979889f 100644 --- a/src/data-sources/shared/historical-chain-context.ts +++ b/src/data-sources/shared/historical-chain-context.ts @@ -28,6 +28,10 @@ const withTimeout = async (promise: Promise, timeoutMs: number, fallbackVa } }; +const getRemainingTimeoutMs = (deadlineAt: number): number => { + return Math.max(0, deadlineAt - Date.now()); +}; + export type HistoricalChainContext = { currentBlockNumber: bigint; currentTimestamp: number; @@ -49,7 +53,7 @@ export const fetchHistoricalChainContext = async ({ }): Promise => { const targetSignature = targetLookbackSeconds && targetLookbackSeconds.length > 0 ? `lookback:${targetLookbackSeconds.join(',')}` : `ts:${(targetTimestamps ?? 
[]).join(',')}`; - const cacheKey = `${chainId}:${targetSignature}`; + const cacheKey = `${chainId}:${targetSignature}:${timeoutMs}`; const now = Date.now(); const cachedByClient = historicalChainContextCache.get(client); const cachedEntry = cachedByClient?.get(cacheKey); @@ -59,13 +63,24 @@ export const fetchHistoricalChainContext = async ({ } const requestPromise = (async (): Promise => { - const currentBlockNumber = await withTimeout(client.getBlockNumber(), timeoutMs, null); + const deadlineAt = Date.now() + timeoutMs; + const currentBlockTimeoutMs = getRemainingTimeoutMs(deadlineAt); + if (currentBlockTimeoutMs === 0) { + return null; + } + + const currentBlockNumber = await withTimeout(client.getBlockNumber(), currentBlockTimeoutMs, null); if (currentBlockNumber == null) { return null; } - const currentBlock = await withTimeout(client.getBlock({ blockNumber: currentBlockNumber }), timeoutMs, null); + const currentBlockFetchTimeoutMs = getRemainingTimeoutMs(deadlineAt); + if (currentBlockFetchTimeoutMs === 0) { + return null; + } + + const currentBlock = await withTimeout(client.getBlock({ blockNumber: currentBlockNumber }), currentBlockFetchTimeoutMs, null); if (!currentBlock) { return null; @@ -76,9 +91,15 @@ export const fetchHistoricalChainContext = async ({ targetLookbackSeconds && targetLookbackSeconds.length > 0 ? targetLookbackSeconds.map((seconds) => currentTimestamp - seconds) : (targetTimestamps ?? 
[]); + const historicalBlocksTimeoutMs = getRemainingTimeoutMs(deadlineAt); + + if (historicalBlocksTimeoutMs === 0) { + return null; + } + const historicalBlocks = await withTimeout( fetchBlocksWithTimestamps(client, chainId, resolvedTargetTimestamps, Number(currentBlockNumber), currentTimestamp), - timeoutMs, + historicalBlocksTimeoutMs, [], ); diff --git a/src/data-sources/shared/market-rate-enrichment.ts b/src/data-sources/shared/market-rate-enrichment.ts index 8a7798ff..f2751e30 100644 --- a/src/data-sources/shared/market-rate-enrichment.ts +++ b/src/data-sources/shared/market-rate-enrichment.ts @@ -261,9 +261,10 @@ const enrichChainMarkets = async ( return markets; } + const requestedTimeoutMs = options.timeoutMs ?? CHAIN_ENRICHMENT_TIMEOUT_MS; const customRpcKey = options.customRpcUrls?.[chainId] ?? 'default'; const marketKey = [...new Set(markets.map((market) => market.uniqueKey))].sort().join(','); - const pendingKey = `${chainId}:${customRpcKey}:${marketKey}`; + const pendingKey = `${chainId}:${customRpcKey}:${requestedTimeoutMs}:${marketKey}`; const pendingRequest = pendingChainHistoricalEnrichment.get(pendingKey); if (pendingRequest) { @@ -271,7 +272,6 @@ const enrichChainMarkets = async ( } const requestPromise = (async (): Promise => { - const requestedTimeoutMs = options.timeoutMs ?? 
CHAIN_ENRICHMENT_TIMEOUT_MS; const timeoutMs = getHistoricalEnrichmentTimeoutMs(markets.length, requestedTimeoutMs); const chunkSize = getHistoricalMulticallChunkSize(chainId, markets.length); const deadlineMs = Date.now() + timeoutMs; diff --git a/src/data-sources/shared/market-usd.ts b/src/data-sources/shared/market-usd.ts index ca918bdc..54fa0704 100644 --- a/src/data-sources/shared/market-usd.ts +++ b/src/data-sources/shared/market-usd.ts @@ -1,6 +1,7 @@ -import { fetchTokenPrices, getTokenPriceKey, type TokenPriceInput } from '@/data-sources/morpho-api/prices'; +import { getTokenPriceKey, type TokenPriceInput } from '@/data-sources/morpho-api/prices'; import { formatBalance } from '@/utils/balance'; import type { Market, MarketUsdPriceSource } from '@/utils/types'; +import { fetchResolvedTokenPrices } from './token-prices'; const hasPositiveAssets = (value?: string): boolean => { if (!value) return false; @@ -54,11 +55,15 @@ export const collectTokenPriceInputsForMarkets = (markets: Market[]): TokenPrice const needsCollateralUsd = shouldComputeUsd(market.state.collateralAssetsUsd ?? null, market.state.collateralAssets); if (needsLoanUsd) { - addToken(market.loanAsset.address, chainId); + if (market.loanAsset?.address) { + addToken(market.loanAsset.address, chainId); + } } if (needsCollateralUsd) { - addToken(market.collateralAsset.address, chainId); + if (market.collateralAsset?.address) { + addToken(market.collateralAsset.address, chainId); + } } } @@ -86,12 +91,12 @@ export const applyTokenPriceResolutionToMarkets = ( return markets.map((market) => { const chainId = market.morphoBlue.chain.id; - const loanPriceKey = getTokenPriceKey(market.loanAsset.address, chainId); - const collateralPriceKey = getTokenPriceKey(market.collateralAsset.address, chainId); - const loanPrice = tokenPrices.get(loanPriceKey); - const collateralPrice = tokenPrices.get(collateralPriceKey); + const loanPriceKey = market.loanAsset?.address ? 
getTokenPriceKey(market.loanAsset.address, chainId) : null; + const collateralPriceKey = market.collateralAsset?.address ? getTokenPriceKey(market.collateralAsset.address, chainId) : null; + const loanPrice = loanPriceKey ? tokenPrices.get(loanPriceKey) : undefined; + const collateralPrice = collateralPriceKey ? tokenPrices.get(collateralPriceKey) : undefined; const loanPriceSource = - tokenPriceSources.get(loanPriceKey) ?? (isFinitePositiveNumber(loanPrice) ? 'direct' : undefined); + (loanPriceKey ? tokenPriceSources.get(loanPriceKey) : undefined) ?? (isFinitePositiveNumber(loanPrice) ? 'direct' : undefined); let nextState = market.state; let changed = false; @@ -152,6 +157,10 @@ export const fillMissingMarketUsdValues = async (markets: Market[]): Promise>, ): TokenInfo => { return { address, decimals: metadata?.decimals ?? DEFAULT_TOKEN_DECIMALS, - id: address, + id: infoToKey(address, chainId), name: metadata?.name ?? UNKNOWN_TOKEN_NAME, symbol: metadata?.symbol ?? 'Unknown', }; }; -const toTokenInfoFromCatalog = (address: string, token: Awaited>[number]): TokenInfo => { +const toTokenInfoFromCatalog = ( + address: string, + chainId: SupportedNetworks, + token: Awaited>[number], +): TokenInfo => { return { address, decimals: token.decimals, - id: address, + id: infoToKey(address, chainId), name: token.symbol, symbol: token.symbol, }; @@ -97,7 +102,7 @@ export const fetchTokenMetadataMap = async ( const catalogToken = findTokenInCatalog(tokenCatalog, tokenRef.address, tokenRef.chainId); if (catalogToken) { - const tokenInfo = toTokenInfoFromCatalog(tokenRef.address, catalogToken); + const tokenInfo = toTokenInfoFromCatalog(tokenRef.address, tokenRef.chainId, catalogToken); resolvedTokenMetadataCache.set(key, tokenInfo); metadataMap.set(key, tokenInfo); continue; @@ -115,7 +120,7 @@ export const fetchTokenMetadataMap = async ( unresolvedByChain.set(tokenRef.chainId, chainAddresses); } - await Promise.all( + await Promise.allSettled( 
Array.from(unresolvedByChain.entries()).map(async ([chainId, addresses]) => { const uniqueAddresses = [...new Set(addresses)]; @@ -123,7 +128,6 @@ export const fetchTokenMetadataMap = async ( return; } - const client = getClient(chainId, customRpcUrls?.[chainId]); const deferredByKey = new Map(); for (const address of uniqueAddresses) { @@ -134,6 +138,8 @@ export const fetchTokenMetadataMap = async ( } try { + const client = getClient(chainId, customRpcUrls?.[chainId]); + for (const addressChunk of chunkAddresses(uniqueAddresses)) { try { const contracts = addressChunk.flatMap((address) => [ @@ -164,7 +170,7 @@ export const fetchTokenMetadataMap = async ( const nameResult = results[index * 3 + 1]; const decimalsResult = results[index * 3 + 2]; - const tokenInfo = createFallbackTokenInfo(address, { + const tokenInfo = createFallbackTokenInfo(address, chainId, { decimals: decimalsResult?.status === 'success' && typeof decimalsResult.result === 'number' ? decimalsResult.result @@ -184,8 +190,7 @@ export const fetchTokenMetadataMap = async ( } catch { for (const address of addressChunk) { const key = infoToKey(address, chainId); - const tokenInfo = createFallbackTokenInfo(address); - resolvedTokenMetadataCache.set(key, tokenInfo); + const tokenInfo = createFallbackTokenInfo(address, chainId); metadataMap.set(key, tokenInfo); deferredByKey.get(key)?.resolve(tokenInfo); } @@ -194,8 +199,7 @@ export const fetchTokenMetadataMap = async ( } catch { for (const address of uniqueAddresses) { const key = infoToKey(address, chainId); - const tokenInfo = createFallbackTokenInfo(address); - resolvedTokenMetadataCache.set(key, tokenInfo); + const tokenInfo = createFallbackTokenInfo(address, chainId); metadataMap.set(key, tokenInfo); deferredByKey.get(key)?.resolve(tokenInfo); } diff --git a/src/data-sources/shared/token-prices.ts b/src/data-sources/shared/token-prices.ts new file mode 100644 index 00000000..e7f87214 --- /dev/null +++ b/src/data-sources/shared/token-prices.ts @@ 
-0,0 +1,241 @@ +import { fetchTokenPrices, getTokenPriceKey, type TokenPriceInput } from '@/data-sources/morpho-api/prices'; +import { fetchMajorPrices, type MajorPrices } from '@/utils/majorPrices'; +import { findToken, TokenPeg, supportedTokens } from '@/utils/tokens'; +import type { MarketUsdPriceSource } from '@/utils/types'; + +type ResolvedTokenPrices = { + prices: Map; + sources: Map; +}; + +const getPegCacheKey = (peg: TokenPeg, chainId: number) => `${peg}-${chainId}`; + +const isFinitePositive = (value: number | undefined): value is number => { + return value !== undefined && Number.isFinite(value) && value > 0; +}; + +export const dedupeTokenPriceInputs = (tokens: TokenPriceInput[]): TokenPriceInput[] => { + const uniqueTokens = new Map(); + + for (const token of tokens) { + const key = getTokenPriceKey(token.address, token.chainId); + if (!uniqueTokens.has(key)) { + uniqueTokens.set(key, { + address: token.address.toLowerCase(), + chainId: token.chainId, + }); + } + } + + return Array.from(uniqueTokens.values()); +}; + +export const augmentTokenPriceInputsWithPegRefs = (tokens: TokenPriceInput[]): TokenPriceInput[] => { + if (tokens.length === 0) { + return tokens; + } + + const neededPegs = new Set(); + const chainIds = new Set(); + + for (const token of tokens) { + chainIds.add(token.chainId); + const meta = findToken(token.address, token.chainId); + + if (meta?.peg === TokenPeg.ETH || meta?.peg === TokenPeg.BTC) { + neededPegs.add(meta.peg); + } + } + + if (neededPegs.size === 0) { + return tokens; + } + + const uniqueTokens = new Map(); + for (const token of tokens) { + uniqueTokens.set(getTokenPriceKey(token.address, token.chainId), token); + } + + for (const chainId of chainIds) { + for (const peg of neededPegs) { + const referenceToken = supportedTokens.find((token) => { + return token.peg === peg && token.networks.some((network) => network.chain.id === chainId); + }); + const network = referenceToken?.networks.find((candidate) => 
candidate.chain.id === chainId); + + if (!network) { + continue; + } + + const key = getTokenPriceKey(network.address, chainId); + if (!uniqueTokens.has(key)) { + uniqueTokens.set(key, { + address: network.address.toLowerCase(), + chainId, + }); + } + } + } + + return Array.from(uniqueTokens.values()); +}; + +const needsMajorPrices = (tokens: TokenPriceInput[]): boolean => { + return tokens.some((token) => { + const meta = findToken(token.address, token.chainId); + return meta?.peg === TokenPeg.ETH || meta?.peg === TokenPeg.BTC; + }); +}; + +export const resolveTokenPricesWithFallback = ({ + directPrices, + majorPrices, + stableTokens, + tokensWithPegRefs, +}: { + directPrices: Map; + majorPrices?: MajorPrices; + stableTokens: TokenPriceInput[]; + tokensWithPegRefs: TokenPriceInput[]; +}): Map => { + const resolvedPrices = new Map(directPrices); + const pegPricesByChain = new Map(); + let globalEthPrice: number | undefined = majorPrices?.[TokenPeg.ETH]; + let globalBtcPrice: number | undefined = majorPrices?.[TokenPeg.BTC]; + + for (const token of tokensWithPegRefs) { + const meta = findToken(token.address, token.chainId); + if (!meta?.peg || meta.peg === TokenPeg.USD) { + continue; + } + + const key = getTokenPriceKey(token.address, token.chainId); + const price = directPrices.get(key); + if (!isFinitePositive(price)) { + continue; + } + + const pegKey = getPegCacheKey(meta.peg, token.chainId); + if (!pegPricesByChain.has(pegKey)) { + pegPricesByChain.set(pegKey, price); + } + + if (meta.peg === TokenPeg.ETH && !globalEthPrice) { + globalEthPrice = price; + } + + if (meta.peg === TokenPeg.BTC && !globalBtcPrice) { + globalBtcPrice = price; + } + } + + const resolvePegPrice = (peg: TokenPeg, chainId: number): number | undefined => { + if (peg === TokenPeg.USD) { + return 1; + } + + const chainPrice = pegPricesByChain.get(getPegCacheKey(peg, chainId)); + if (isFinitePositive(chainPrice)) { + return chainPrice; + } + + if (peg === TokenPeg.ETH) { + return 
globalEthPrice; + } + + if (peg === TokenPeg.BTC) { + return globalBtcPrice; + } + + return undefined; + }; + + for (const token of stableTokens) { + const key = getTokenPriceKey(token.address, token.chainId); + if (resolvedPrices.has(key)) { + continue; + } + + const meta = findToken(token.address, token.chainId); + if (!meta?.peg) { + continue; + } + + const fallbackPrice = resolvePegPrice(meta.peg, token.chainId); + if (isFinitePositive(fallbackPrice)) { + resolvedPrices.set(key, fallbackPrice); + } + } + + return resolvedPrices; +}; + +export const resolveTokenPriceSources = ({ + directPrices, + resolvedPrices, + stableTokens, +}: { + directPrices: Map; + resolvedPrices: Map; + stableTokens: TokenPriceInput[]; +}): Map => { + const sources = new Map(); + + for (const token of stableTokens) { + const key = getTokenPriceKey(token.address, token.chainId); + const directPrice = directPrices.get(key); + + if (isFinitePositive(directPrice)) { + sources.set(key, 'direct'); + continue; + } + + const meta = findToken(token.address, token.chainId); + if (!meta?.peg) { + continue; + } + + const fallbackPrice = resolvedPrices.get(key); + if (isFinitePositive(fallbackPrice)) { + sources.set(key, 'peg'); + } + } + + return sources; +}; + +export const fetchResolvedTokenPrices = async (tokens: TokenPriceInput[]): Promise => { + const stableTokens = dedupeTokenPriceInputs(tokens); + + if (stableTokens.length === 0) { + return { + prices: new Map(), + sources: new Map(), + }; + } + + const tokensWithPegRefs = augmentTokenPriceInputsWithPegRefs(stableTokens); + const shouldFetchMajorPrices = needsMajorPrices(stableTokens); + + const [directPrices, majorPrices] = await Promise.all([ + fetchTokenPrices(tokensWithPegRefs), + shouldFetchMajorPrices ? 
fetchMajorPrices().catch(() => ({})) : Promise.resolve({} as MajorPrices), + ]); + + const resolvedPrices = resolveTokenPricesWithFallback({ + directPrices, + majorPrices, + stableTokens, + tokensWithPegRefs, + }); + const sources = resolveTokenPriceSources({ + directPrices, + resolvedPrices, + stableTokens, + }); + + return { + prices: resolvedPrices, + sources, + }; +}; diff --git a/src/data-sources/subgraph/historical.ts b/src/data-sources/subgraph/historical.ts index 90f0f317..62cb5d79 100644 --- a/src/data-sources/subgraph/historical.ts +++ b/src/data-sources/subgraph/historical.ts @@ -99,7 +99,6 @@ const transformSubgraphSnapshotsToHistoricalResult = ( const utilization = supplyNative > 0n ? Number((borrowNative * UTILIZATION_SCALE) / supplyNative) / Number(UTILIZATION_SCALE) : 0; - rates.apyAtTarget.push({ x: timestamp, y: 0 }); rates.utilization.push({ x: timestamp, y: utilization }); volumes.supplyAssetsUsd.push({ x: timestamp, y: supplyAssetsUsd }); diff --git a/src/data-sources/user-position.ts b/src/data-sources/user-position.ts index e89ad742..b62f891d 100644 --- a/src/data-sources/user-position.ts +++ b/src/data-sources/user-position.ts @@ -5,6 +5,7 @@ import { fetchMorphoUserPositionForMarket } from '@/data-sources/morpho-api/posi import { getErrorMessage, logDataSourceEvent } from '@/data-sources/shared/source-debug'; import { fetchSubgraphUserPositionForMarket } from '@/data-sources/subgraph/positions'; import type { CustomRpcUrls } from '@/stores/useCustomRpc'; +import { getChainScopedMarketKey } from '@/utils/marketIdentity'; import type { SupportedNetworks } from '@/utils/networks'; import type { MarketPosition } from '@/utils/types'; @@ -30,6 +31,43 @@ const hydratePositionMarket = async ( }; }; +const isMatchingPosition = (position: MarketPosition, marketUniqueKey: string, chainId: SupportedNetworks): boolean => { + const positionMarketKey = position.market?.uniqueKey; + const positionChainId = position.market?.morphoBlue?.chain?.id; + + if 
(!positionMarketKey || positionChainId == null) { + return false; + } + + return getChainScopedMarketKey(positionMarketKey, positionChainId) === getChainScopedMarketKey(marketUniqueKey, chainId); +}; + +const getValidatedPosition = ({ + chainId, + marketUniqueKey, + position, + source, +}: { + chainId: SupportedNetworks; + marketUniqueKey: string; + position: MarketPosition | null; + source: string; +}): MarketPosition | null => { + if (!position) { + return null; + } + + if (isMatchingPosition(position, marketUniqueKey, chainId)) { + return position; + } + + logDataSourceEvent('user-position', `discarded mismatched ${source} position payload`, { + chainId, + marketUniqueKey, + }); + return null; +}; + export const fetchUserPositionForMarket = async ( marketUniqueKey: string, userAddress: string, @@ -40,7 +78,12 @@ export const fetchUserPositionForMarket = async ( ): Promise => { if (hasEnvioIndexer()) { try { - const envioPosition = await fetchEnvioUserPositionForMarket(marketUniqueKey, userAddress, chainId, options); + const envioPosition = getValidatedPosition({ + chainId, + marketUniqueKey, + position: await fetchEnvioUserPositionForMarket(marketUniqueKey, userAddress, chainId, options), + source: 'Envio', + }); if (envioPosition) { logDataSourceEvent('user-position', 'using Envio position source', { @@ -60,7 +103,12 @@ export const fetchUserPositionForMarket = async ( if (supportsMorphoApi(chainId)) { try { - const morphoPosition = await fetchMorphoUserPositionForMarket(marketUniqueKey, userAddress, chainId); + const morphoPosition = getValidatedPosition({ + chainId, + marketUniqueKey, + position: await fetchMorphoUserPositionForMarket(marketUniqueKey, userAddress, chainId), + source: 'Morpho API', + }); if (morphoPosition) { logDataSourceEvent('user-position', 'using Morpho API fallback for position', { @@ -82,9 +130,20 @@ export const fetchUserPositionForMarket = async ( chainId, marketUniqueKey, }); - const subgraphPosition = await 
fetchSubgraphUserPositionForMarket(marketUniqueKey, userAddress, chainId); + let subgraphPosition: MarketPosition | null = null; + + try { + subgraphPosition = await fetchSubgraphUserPositionForMarket(marketUniqueKey, userAddress, chainId); + } catch (error) { + logDataSourceEvent('user-position', 'subgraph position fallback failed', { + chainId, + marketUniqueKey, + reason: getErrorMessage(error), + }); + return null; + } - if (!subgraphPosition) { + if (!subgraphPosition || !isMatchingPosition(subgraphPosition, marketUniqueKey, chainId)) { return null; } diff --git a/src/features/market-detail/components/borrows-table.tsx b/src/features/market-detail/components/borrows-table.tsx index 6691bc9d..693697bd 100644 --- a/src/features/market-detail/components/borrows-table.tsx +++ b/src/features/market-detail/components/borrows-table.tsx @@ -34,7 +34,7 @@ export function BorrowsTable({ chainId, market, minAssets, onOpenFiltersModal }: isLoading, isFetching, error, - } = useMarketBorrows(market?.uniqueKey, market.loanAsset.id, chainId, minAssets, currentPage, pageSize); + } = useMarketBorrows(market?.uniqueKey, market.loanAsset.address, chainId, minAssets, currentPage, pageSize); const borrows = paginatedData?.items ?? []; const totalCount = paginatedData?.totalCount ?? 0; diff --git a/src/features/market-detail/components/supplies-table.tsx b/src/features/market-detail/components/supplies-table.tsx index 02ef5efc..2e47b9e6 100644 --- a/src/features/market-detail/components/supplies-table.tsx +++ b/src/features/market-detail/components/supplies-table.tsx @@ -33,7 +33,7 @@ export function SuppliesTable({ chainId, market, minAssets, onOpenFiltersModal } data: paginatedData, isLoading, isFetching, - } = useMarketSupplies(market?.uniqueKey, market.loanAsset.id, chainId, minAssets, currentPage, pageSize); + } = useMarketSupplies(market?.uniqueKey, market.loanAsset.address, chainId, minAssets, currentPage, pageSize); const supplies = paginatedData?.items ?? 
[]; const totalCount = paginatedData?.totalCount ?? 0; diff --git a/src/features/markets/components/table/market-row-detail.tsx b/src/features/markets/components/table/market-row-detail.tsx index 0ecf2437..2a6fe61c 100644 --- a/src/features/markets/components/table/market-row-detail.tsx +++ b/src/features/markets/components/table/market-row-detail.tsx @@ -32,9 +32,13 @@ export function ExpandedMarketDetail({ market }: { market: Market }) {

Available Liquidity

- - {formatReadable(Number(market.state.liquidityAssetsUsd))} - + {market.usdPriceSource === 'none' ? ( + '—' + ) : ( + + {formatReadable(Number(market.state.liquidityAssetsUsd))} + + )}

diff --git a/src/features/markets/components/table/market-table-body.tsx b/src/features/markets/components/table/market-table-body.tsx index 89c20e53..b4d3d27e 100644 --- a/src/features/markets/components/table/market-table-body.tsx +++ b/src/features/markets/components/table/market-table-body.tsx @@ -178,6 +178,7 @@ export function MarketTableBody({ currentEntries, expandedRowId, setExpandedRowI decimals={item.loanAsset.decimals} symbol={item.loanAsset.symbol} isEstimated={item.usdPriceSource === 'peg'} + isMissingUsd={item.usdPriceSource === 'none'} /> )} {columnVisibility.totalBorrow && ( @@ -188,6 +189,7 @@ export function MarketTableBody({ currentEntries, expandedRowId, setExpandedRowI decimals={item.loanAsset.decimals} symbol={item.loanAsset.symbol} isEstimated={item.usdPriceSource === 'peg'} + isMissingUsd={item.usdPriceSource === 'none'} /> )} {columnVisibility.liquidity && ( @@ -198,6 +200,7 @@ export function MarketTableBody({ currentEntries, expandedRowId, setExpandedRowI decimals={item.loanAsset.decimals} symbol={item.loanAsset.symbol} isEstimated={item.usdPriceSource === 'peg'} + isMissingUsd={item.usdPriceSource === 'none'} /> )} {columnVisibility.supplyAPY && ( diff --git a/src/features/markets/components/table/market-table-utils.tsx b/src/features/markets/components/table/market-table-utils.tsx index 9c2a547b..737667fb 100644 --- a/src/features/markets/components/table/market-table-utils.tsx +++ b/src/features/markets/components/table/market-table-utils.tsx @@ -70,6 +70,7 @@ export function TDTotalSupplyOrBorrow({ decimals, symbol, isEstimated = false, + isMissingUsd = false, }: { dataLabel: string; assetsUSD: number; @@ -77,6 +78,7 @@ export function TDTotalSupplyOrBorrow({ decimals: number; symbol: string; isEstimated?: boolean; + isMissingUsd?: boolean; }) { return (

- ${formatReadable(Number(assetsUSD))} + {isMissingUsd ? '—' : ${formatReadable(Number(assetsUSD))}}

{`${formatReadable(formatBalance(assets, decimals))} ${symbol}`}

diff --git a/src/graphql/envio-queries.ts b/src/graphql/envio-queries.ts index 222fdfbe..a822564d 100644 --- a/src/graphql/envio-queries.ts +++ b/src/graphql/envio-queries.ts @@ -216,3 +216,24 @@ export const envioBorrowRateUpdatesQuery = ` } } `; + +export const envioLatestBorrowRateUpdateBeforeQuery = ` + query EnvioLatestBorrowRateUpdateBefore($chainId: Int!, $marketId: String!, $timestampLte: BigInt!) { + AdaptiveCurveIrm_BorrowRateUpdate( + limit: 1 + where: { + chainId: { _eq: $chainId } + market_id: { _eq: $marketId } + timestamp: { _lte: $timestampLte } + } + order_by: [{ timestamp: desc }, { id: desc }] + ) { + avgBorrowRate + chainId + market_id + rateAtTarget + timestamp + txHash + } + } +`; diff --git a/src/graphql/morpho-api-queries.ts b/src/graphql/morpho-api-queries.ts index 67699c55..199c08cd 100644 --- a/src/graphql/morpho-api-queries.ts +++ b/src/graphql/morpho-api-queries.ts @@ -366,9 +366,10 @@ export const userTransactionsQuery = ` `; export const marketLiquidationsQuery = ` - query getMarketLiquidations($uniqueKey: String!, $first: Int, $skip: Int) { + query getMarketLiquidations($uniqueKey: String!, $chainId: Int!, $first: Int, $skip: Int) { transactions (where: { marketUniqueKey_in: [$uniqueKey], + chainId_in: [$chainId], type_in: [MarketLiquidation] }, first: $first, @@ -398,9 +399,10 @@ export const marketLiquidationsQuery = ` `; export const marketSuppliesQuery = ` - query getMarketSupplyActivities($uniqueKey: String!, $minAssets: BigInt!, $first: Int, $skip: Int) { + query getMarketSupplyActivities($uniqueKey: String!, $chainId: Int!, $minAssets: BigInt!, $first: Int, $skip: Int) { transactions (where: { marketUniqueKey_in: [$uniqueKey], + chainId_in: [$chainId], assets_gte: $minAssets, type_in: [MarketSupply, MarketWithdraw] }, @@ -432,9 +434,10 @@ export const marketSuppliesQuery = ` `; export const marketBorrowsQuery = ` - query getMarketBorrowActivities($uniqueKey: String!, $minAssets: BigInt, $first: Int, $skip: Int) { + query 
getMarketBorrowActivities($uniqueKey: String!, $chainId: Int!, $minAssets: BigInt, $first: Int, $skip: Int) { transactions (where: { marketUniqueKey_in: [$uniqueKey], + chainId_in: [$chainId], assets_gte: $minAssets, type_in: [MarketBorrow, MarketRepay] }, diff --git a/src/hooks/useTokenPrices.ts b/src/hooks/useTokenPrices.ts index 01091cf3..4af6752a 100644 --- a/src/hooks/useTokenPrices.ts +++ b/src/hooks/useTokenPrices.ts @@ -1,10 +1,8 @@ import { useMemo } from 'react'; import { useQuery } from '@tanstack/react-query'; -import { fetchTokenPrices, type TokenPriceInput } from '@/data-sources/morpho-api/prices'; -import { getTokenPriceKey } from '@/data-sources/morpho-api/prices'; -import { findToken, TokenPeg, supportedTokens } from '@/utils/tokens'; +import type { TokenPriceInput } from '@/data-sources/morpho-api/prices'; +import { fetchResolvedTokenPrices, dedupeTokenPriceInputs } from '@/data-sources/shared/token-prices'; import type { MarketUsdPriceSource } from '@/utils/types'; -import { fetchMajorPrices, type MajorPrices } from '@/utils/majorPrices'; // Query keys for token prices export const tokenPriceKeys = { @@ -31,180 +29,29 @@ type UseTokenPricesReturn = { * @param tokens - Array of token addresses and chain IDs to fetch prices for * @returns Object containing prices map, loading state, and error */ -const getPegCacheKey = (peg: TokenPeg, chainId: number) => `${peg}-${chainId}`; - -const isFinitePositive = (value: number | undefined): value is number => { - return value !== undefined && Number.isFinite(value) && value > 0; -}; - export const useTokenPrices = (tokens: TokenPriceInput[]): UseTokenPricesReturn => { // Memoize the token list to prevent unnecessary refetches const stableTokens = useMemo(() => { - // Deduplicate tokens based on address-chainId combination - const uniqueTokens = new Map(); - tokens.forEach((token) => { - const key = `${token.address.toLowerCase()}-${token.chainId}`; - if (!uniqueTokens.has(key)) { - uniqueTokens.set(key, { - 
address: token.address.toLowerCase(), - chainId: token.chainId, - }); - } - }); - return Array.from(uniqueTokens.values()); + return dedupeTokenPriceInputs(tokens); }, [tokens]); - // If we need ETH/BTC peg fallbacks, add reference tokens for those pegs on the same chains. - const tokensWithPegRefs = useMemo(() => { - if (stableTokens.length === 0) return stableTokens; - - const neededPegs = new Set(); - const chainIds = new Set(); - - stableTokens.forEach((token) => { - chainIds.add(token.chainId); - const meta = findToken(token.address, token.chainId); - if (meta?.peg === TokenPeg.ETH || meta?.peg === TokenPeg.BTC) { - neededPegs.add(meta.peg); - } - }); - - if (neededPegs.size === 0) return stableTokens; - - const uniqueTokens = new Map(); - stableTokens.forEach((token) => { - uniqueTokens.set(getTokenPriceKey(token.address, token.chainId), token); - }); - - chainIds.forEach((chainId) => { - neededPegs.forEach((peg) => { - const referenceToken = supportedTokens.find((token) => { - return token.peg === peg && token.networks.some((_network) => _network.chain.id === chainId); - }); - if (!referenceToken) return; - const network = referenceToken.networks.find((n) => n.chain.id === chainId); - if (!network) return; - - const key = getTokenPriceKey(network.address, chainId); - if (!uniqueTokens.has(key)) { - uniqueTokens.set(key, { address: network.address.toLowerCase(), chainId }); - } - }); - }); - - return Array.from(uniqueTokens.values()); - }, [stableTokens]); - const { - data: prices, + data: resolvedTokenPrices, isLoading, error, - } = useQuery, Error>({ - queryKey: tokenPriceKeys.tokens(tokensWithPegRefs), + } = useQuery<{ prices: Map; sources: Map }, Error>({ + queryKey: tokenPriceKeys.tokens(stableTokens), queryFn: async () => { - return fetchTokenPrices(tokensWithPegRefs); + return fetchResolvedTokenPrices(stableTokens); }, - enabled: tokensWithPegRefs.length > 0, + enabled: stableTokens.length > 0, staleTime: 5 * 60 * 1000, // 5 minutes gcTime: 10 * 60 * 
1000, // 10 minutes }); - const needsMajorPrices = useMemo(() => { - return stableTokens.some((token) => { - const meta = findToken(token.address, token.chainId); - return meta?.peg === TokenPeg.ETH || meta?.peg === TokenPeg.BTC; - }); - }, [stableTokens]); - - const { data: majorPrices } = useQuery({ - queryKey: ['majorPrices'], - queryFn: fetchMajorPrices, - enabled: needsMajorPrices, - staleTime: 60_000, // 1 minute - gcTime: 5 * 60 * 1000, - }); - - const pricesWithFallback = useMemo(() => { - const basePrices = prices ?? new Map(); - const resolvedPrices = new Map(basePrices); - - // Cache peg reference prices by chain - const pegPricesByChain = new Map(); - let globalEthPrice: number | undefined = majorPrices?.[TokenPeg.ETH]; - let globalBtcPrice: number | undefined = majorPrices?.[TokenPeg.BTC]; - - tokensWithPegRefs.forEach((token) => { - const meta = findToken(token.address, token.chainId); - if (!meta?.peg || meta.peg === TokenPeg.USD) return; - const key = getTokenPriceKey(token.address, token.chainId); - const price = basePrices.get(key); - if (!isFinitePositive(price)) return; - - const pegKey = getPegCacheKey(meta.peg, token.chainId); - if (!pegPricesByChain.has(pegKey)) { - pegPricesByChain.set(pegKey, price); - } - - if (meta.peg === TokenPeg.ETH && !globalEthPrice) globalEthPrice = price; - if (meta.peg === TokenPeg.BTC && !globalBtcPrice) globalBtcPrice = price; - }); - - const resolvePegPrice = (peg: TokenPeg, chainId: number): number | undefined => { - if (peg === TokenPeg.USD) return 1; - const chainKey = getPegCacheKey(peg, chainId); - const chainPrice = pegPricesByChain.get(chainKey); - if (isFinitePositive(chainPrice)) return chainPrice; - if (peg === TokenPeg.ETH) return globalEthPrice; - if (peg === TokenPeg.BTC) return globalBtcPrice; - return undefined; - }; - - stableTokens.forEach((token) => { - const key = getTokenPriceKey(token.address, token.chainId); - if (resolvedPrices.has(key)) return; - - const meta = findToken(token.address, 
token.chainId); - if (!meta?.peg) return; - - const fallbackPrice = resolvePegPrice(meta.peg, token.chainId); - if (isFinitePositive(fallbackPrice)) { - resolvedPrices.set(key, fallbackPrice); - } - }); - - return resolvedPrices; - }, [prices, stableTokens, tokensWithPegRefs, majorPrices]); - - const priceSources = useMemo(() => { - const basePrices = prices ?? new Map(); - const resolvedSources = new Map(); - - stableTokens.forEach((token) => { - const key = getTokenPriceKey(token.address, token.chainId); - const directPrice = basePrices.get(key); - - if (isFinitePositive(directPrice)) { - resolvedSources.set(key, 'direct'); - return; - } - - const meta = findToken(token.address, token.chainId); - if (!meta?.peg) { - return; - } - - const fallbackPrice = pricesWithFallback.get(key); - if (isFinitePositive(fallbackPrice)) { - resolvedSources.set(key, 'peg'); - } - }); - - return resolvedSources; - }, [prices, pricesWithFallback, stableTokens]); - return { - prices: pricesWithFallback, - sources: priceSources, + prices: resolvedTokenPrices?.prices ?? new Map(), + sources: resolvedTokenPrices?.sources ?? new Map(), isLoading, error: error ?? null, }; diff --git a/src/hooks/useUserPosition.ts b/src/hooks/useUserPosition.ts index 41d2fd45..22201798 100644 --- a/src/hooks/useUserPosition.ts +++ b/src/hooks/useUserPosition.ts @@ -38,18 +38,17 @@ const useUserPosition = (user: string | undefined, chainId: SupportedNetworks | return null; } - if (!client) { - console.error('Public client not available'); - return null; - } - // 1. 
Try fetching the on-chain snapshot first let snapshot = null; - try { - snapshot = await fetchPositionSnapshot(marketKey, user as Address, chainId, undefined, client); - } catch (snapshotError) { - console.error(`Error fetching position snapshot for ${user} on market ${marketKey}:`, snapshotError); - // Snapshot fetch failed, will proceed to fallback fetch + if (client) { + try { + snapshot = await fetchPositionSnapshot(marketKey, user as Address, chainId, undefined, client); + } catch (snapshotError) { + console.error(`Error fetching position snapshot for ${user} on market ${marketKey}:`, snapshotError); + // Snapshot fetch failed, will proceed to fallback fetch + } + } else { + console.warn(`Public client not available for chain ${chainId}. Using indexed position fallback.`); } let finalPosition: MarketPosition | null = null; diff --git a/src/utils/tokenCatalog.ts b/src/utils/tokenCatalog.ts index 329e526d..1ffe9a6e 100644 --- a/src/utils/tokenCatalog.ts +++ b/src/utils/tokenCatalog.ts @@ -13,6 +13,7 @@ const PendleAssetSchema = z.object({ type PendleAsset = z.infer; const TOKEN_CATALOG_TTL_MS = 5 * 60 * 1000; +const PENDLE_FETCH_TIMEOUT_MS = 5_000; const PENDLE_SUPPORTED_CHAIN_IDS = [ SupportedNetworks.Mainnet, SupportedNetworks.Base, @@ -33,8 +34,13 @@ let tokenCatalogCache: | null = null; const fetchPendleAssets = async (chainId: number): Promise => { + const abortController = new AbortController(); + const timeoutHandle = globalThis.setTimeout(() => abortController.abort(), PENDLE_FETCH_TIMEOUT_MS); + try { - const response = await fetch(`https://api-v2.pendle.finance/core/v1/${chainId}/assets/all`); + const response = await fetch(`https://api-v2.pendle.finance/core/v1/${chainId}/assets/all`, { + signal: abortController.signal, + }); if (!response.ok) { return []; @@ -45,6 +51,8 @@ const fetchPendleAssets = async (chainId: number): Promise => { } catch (error) { console.error(`Error fetching Pendle assets for chain ${chainId}:`, error); return []; + } finally 
{ + globalThis.clearTimeout(timeoutHandle); } }; @@ -96,13 +104,14 @@ export const fetchMergedTokenCatalog = async (): Promise => { const promise = (async () => { try { - const externalTokenGroups = await Promise.all( + const settledExternalTokenGroups = await Promise.allSettled( PENDLE_SUPPORTED_CHAIN_IDS.map(async (chainId) => { const assets = await fetchPendleAssets(chainId); return assets.map((asset) => convertPendleAssetToToken(asset, chainId)); }), ); + const externalTokenGroups = settledExternalTokenGroups.flatMap((result) => (result.status === 'fulfilled' ? [result.value] : [])); return mergeCatalogTokens(externalTokenGroups.flat()); } catch (error) { tokenCatalogCache = null; From 125c6848193456ed3fd197edd4fb28fbc0e2d427 Mon Sep 17 00:00:00 2001 From: antoncoding Date: Sat, 14 Mar 2026 22:24:11 +0800 Subject: [PATCH 4/5] refactor: remove subgraph --- AGENTS.md | 1 + docs/TECHNICAL_OVERVIEW.md | 79 +-- src/config/dataSources.ts | 21 - src/data-sources/envio/transactions.ts | 67 ++- src/data-sources/market-activity.ts | 85 ++- src/data-sources/market-catalog.ts | 143 ++--- src/data-sources/market-details.ts | 45 +- src/data-sources/market-historical.ts | 33 +- src/data-sources/market-participants.ts | 58 +-- .../morpho-api/market-metadata.ts | 136 +++++ src/data-sources/morpho-api/positions.ts | 17 +- src/data-sources/morpho-api/transactions.ts | 69 ++- src/data-sources/position-markets.ts | 30 +- src/data-sources/shared/market-merge.ts | 50 -- src/data-sources/shared/market-metadata.ts | 35 ++ src/data-sources/subgraph/historical.ts | 164 ------ src/data-sources/subgraph/market-borrowers.ts | 145 ------ src/data-sources/subgraph/market-borrows.ts | 104 ---- .../subgraph/market-liquidations.ts | 84 --- src/data-sources/subgraph/market-suppliers.ts | 116 ----- src/data-sources/subgraph/market-supplies.ts | 105 ---- src/data-sources/subgraph/market.ts | 253 --------- src/data-sources/subgraph/positions.ts | 202 ------- src/data-sources/subgraph/transactions.ts 
| 217 -------- src/data-sources/subgraph/types.ts | 65 --- src/data-sources/user-position.ts | 52 +- .../transaction-history-preview.tsx | 30 +- .../components/borrows-table.tsx | 2 +- .../components/supplies-table.tsx | 2 +- .../components/history-tab.tsx | 7 +- .../position-detail/position-view.tsx | 7 +- src/graphql/morpho-api-queries.ts | 51 ++ src/graphql/morpho-subgraph-queries.ts | 492 ------------------ src/hooks/queries/fetchUserTransactions.ts | 109 ++-- src/hooks/queries/useMarketMetadataQuery.ts | 22 + src/hooks/queries/useMarketsMetadataQuery.ts | 17 + src/hooks/queries/useMarketsQuery.ts | 6 +- src/hooks/queries/useUserTransactionsQuery.ts | 82 +-- src/hooks/useMarketBorrows.ts | 22 +- src/hooks/useMarketData.ts | 18 +- src/hooks/useMarketLiquidations.ts | 1 - src/hooks/useMarketSupplies.ts | 22 +- src/hooks/usePositionReport.ts | 37 +- src/hooks/useProcessedMarkets.ts | 27 +- src/hooks/useUserPosition.ts | 6 +- src/hooks/useUserPositionsSummaryData.ts | 22 +- src/utils/subgraph-types.ts | 81 --- src/utils/subgraph-urls.ts | 42 -- src/utils/types.ts | 10 + src/utils/user-transaction-history-cache.ts | 4 +- 50 files changed, 788 insertions(+), 2707 deletions(-) create mode 100644 src/data-sources/morpho-api/market-metadata.ts delete mode 100644 src/data-sources/shared/market-merge.ts create mode 100644 src/data-sources/shared/market-metadata.ts delete mode 100644 src/data-sources/subgraph/historical.ts delete mode 100644 src/data-sources/subgraph/market-borrowers.ts delete mode 100644 src/data-sources/subgraph/market-borrows.ts delete mode 100644 src/data-sources/subgraph/market-liquidations.ts delete mode 100644 src/data-sources/subgraph/market-suppliers.ts delete mode 100644 src/data-sources/subgraph/market-supplies.ts delete mode 100644 src/data-sources/subgraph/market.ts delete mode 100644 src/data-sources/subgraph/positions.ts delete mode 100644 src/data-sources/subgraph/transactions.ts delete mode 100644 src/data-sources/subgraph/types.ts delete 
mode 100644 src/graphql/morpho-subgraph-queries.ts create mode 100644 src/hooks/queries/useMarketMetadataQuery.ts create mode 100644 src/hooks/queries/useMarketsMetadataQuery.ts delete mode 100644 src/utils/subgraph-types.ts delete mode 100644 src/utils/subgraph-urls.ts diff --git a/AGENTS.md b/AGENTS.md index 84b5fd65..33b3d43d 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -176,6 +176,7 @@ When touching transaction and position flows, validation MUST include all releva 40. **Indexer market pre-hydration integrity**: shared market-catalog/indexer adapters must exclude structurally invalid markets (for example zero-address IRM or collateral token, plus local blacklist gates) before token-metadata hydration or enrichment begins, and source logs must distinguish raw fetch completion from downstream enrichment so slow stages are attributable. 41. **Pagination completeness integrity**: shared pagination utilities must validate positive page sizes/non-negative limits, and any paged market/participant/history source must either fetch complete results or fail closed to fallback. Do not silently cap user-visible datasets at arbitrary ceilings or treat incomplete/invalid pages as successful partial data. 42. **Indexed empty-result integrity**: shared indexer-backed market/position/transaction adapters must distinguish a legitimate empty domain result from an internal hydration/filtering failure. Empty arrays/zero counts are authoritative only when the adapter completed without downstream mapping, metadata, or market-hydration errors; otherwise throw and let the shared fallback layer decide. +43. **Cross-chain adapter ownership integrity**: shared market/position/history adapters must own chain fan-out, pagination, and source fallback internally. Feature hooks and UI call sites must request the needed chain scope, not reintroduce per-chain loops, parallel legacy-source requests, or source-specific branching above the adapter boundary. 
### REQUIRED: Regression Rule Capture diff --git a/docs/TECHNICAL_OVERVIEW.md b/docs/TECHNICAL_OVERVIEW.md index e120664c..abce5619 100644 --- a/docs/TECHNICAL_OVERVIEW.md +++ b/docs/TECHNICAL_OVERVIEW.md @@ -6,7 +6,7 @@ Monarch is a client-side DeFi dashboard for the Morpho Blue lending protocol. It **Key Architectural Decisions:** - Next.js 15 App Router with React 18 -- Dual data source strategy: Morpho API (primary) → Subgraph (fallback) +- Composed market data strategy: Morpho API metadata + Envio indexed state + RPC enrichment - Zustand for client state, React Query for server state - All user data in localStorage (no backend DB) - Multi-chain support with custom RPC override capability @@ -148,16 +148,15 @@ MorphoChainlinkOracleData { ## Data Sources -### Dual-Source Strategy +### Market Data Strategy ``` -Primary: Morpho API (https://blue-api.morpho.org/graphql) - ↓ (if unavailable or unsupported chain) -Fallback: Subgraph (The Graph / Goldsky) +Envio/Monarch → primary indexed market state, positions, and transactions +Morpho API → indexed fallback + separate market metadata +RPC → fresh snapshots and derived enrichments +Subgraph → kept only for Vault V2 / adapter discovery ``` -**Morpho API Supported Chains:** Mainnet, Base, Unichain, Polygon, Arbitrum, HyperEVM, Monad - ### Static Data (Build-time or cached) | Data Type | Source | Location | |-----------|--------|----------| @@ -170,29 +169,32 @@ Fallback: Subgraph (The Graph / Goldsky) ### Dynamic Data (Runtime fetched) | Data Type | Source | Refresh | Query Hook | |-----------|--------|---------|------------| -| Markets list | Morpho API/Subgraph | 5 min stale | `useMarketsQuery` | +| Markets list | Envio + Morpho fallback + RPC enrichment | 5 min stale | `useMarketsQuery` | | Market metrics (flows, trending) | Monarch API | 5 min stale | `useMarketMetricsQuery` | -| Market state (APY, utilization) | Morpho API | 30s stale | `useMarketData` | -| User positions | Morpho API + on-chain | 5 min | 
`useUserPositions` | +| Market state (APY, utilization) | Envio + Morpho fallback + on-chain | 30s stale | `useMarketData` | +| User positions | Envio/Morpho + on-chain | 5 min | `useUserPositions` | | Vaults list | Morpho API | 5 min | `useAllMorphoVaultsQuery` | | Vault allocations | On-chain (Wagmi) | On demand | `useAllocations` | | Token balances | On-chain multicall | 5 min | `useUserBalancesQuery` | | Oracle prices | Morpho API | 5 min | `useOracleDataQuery` | | Merkl rewards | Merkl API | On demand | `useMerklCampaignsQuery` | -| Market liquidations | Morpho API/Subgraph | 5 min stale | `useMarketLiquidations` | +| Market liquidations | Envio/Morpho | 5 min stale | `useMarketLiquidations` | ### Data Flow Patterns **Market Data Flow:** ``` -Raw API fetch → Blacklist filtering → Oracle enrichment → -Split: allMarkets vs whitelistedMarkets +1. Fetch indexed market state from Envio, or fall back to Morpho API +2. Apply blacklist and USD/target-rate/historical APY enrichments +3. Fetch Morpho market metadata separately (warnings, trusted-by / supplying vaults) +4. Recombine metadata at the hook layer for UI consumers that need it +5. Split: allMarkets vs whitelistedMarkets ``` **Position Data Flow:** ``` -1. Fetch market keys from API (which markets user has positions in) -2. Fetch on-chain snapshots per market (usePositionSnapshots) +1. Discover user position markets from cross-chain Envio or cross-chain Morpho fallback +2. Fetch on-chain snapshots per chain (usePositionSnapshots) 3. Combine with market metadata 4. Group by loan asset 5. 
Calculate earnings @@ -250,7 +252,7 @@ All hooks in `/src/hooks/queries/` follow React Query patterns: | Hook | Key | Stale Time | Refetch | Focus | |------|-----|------------|---------|-------| -| `useMarketsQuery` | `['markets']` | 5 min | 5 min | Yes | +| `useMarketsQuery` | `['markets', rpcConfigVersion]` | 5 min | 5 min | Yes | | `useMarketMetricsQuery` | `['market-metrics', ...]` | 5 min | 5 min | No | | `useTokensQuery` | `['tokens']` | 5 min | 5 min | Yes | | `useOracleDataQuery` | `['oracle-data']` | 5 min | 5 min | Yes | @@ -260,18 +262,22 @@ All hooks in `/src/hooks/queries/` follow React Query patterns: | `useUserTransactionsQuery` | `['user-transactions', ...]` | 60s | - | No | | `useAllocationsQuery` | `['vault-allocations', ...]` | 30s | - | No | -### Data Source Switching +### Data Source Composition **File:** `/src/config/dataSources.ts` ``` -supportsMorphoApi(network) returns true for: -- Mainnet, Base, Unichain, Polygon, Arbitrum, HyperEVM, Monad +Envio endpoint present: +- use Envio for indexed state/events/positions/history when available + +Market reads: +- use Envio first for indexed market data +- fall back to Morpho-only indexed data if Envio fails +- fetch Morpho market metadata separately for warnings / supplying vaults -Fallback Strategy: -1. IF supportsMorphoApi(network) → Try Morpho API -2. IF API fails OR unsupported → Try Subgraph -3. Each network fails independently (partial data OK) +Position/history reads: +- use cross-chain Envio first +- fall back to cross-chain Morpho ``` ### GraphQL Fetchers @@ -282,9 +288,8 @@ Fallback Strategy: - Throws on GraphQL errors (strict) **Subgraph** (`/src/data-sources/subgraph/fetchers.ts`): +- Used only for Vault V2 / adapter discovery - Configurable URL per network -- Logs GraphQL errors but continues (lenient) -- Adds price estimation for unknown tokens ### Complete Data Flow: Market Data @@ -294,25 +299,26 @@ Fallback Strategy: 2. 
Parallel queries start: - usePublicClient() for on-chain reads - useOracleDataQuery() for oracle enrichment + - useMarketMetadataQuery() for Morpho metadata ↓ 3. Market fetch: a. Try on-chain snapshot (viem multicall) - b. Try Morpho API (if supported) - c. Fallback to Subgraph - d. Merge snapshot with API state + b. Fetch indexed market details (Envio first, Morpho fallback) + c. Merge snapshot with indexed state ↓ -4. Oracle enrichment via useMemo() +4. Recombine metadata + oracle enrichment via useMemo() ↓ 5. Return { data: enrichedMarket, isLoading, error } ``` ### Key Patterns -1. **Fallback Chain**: API → Subgraph → Empty -2. **Parallel Execution**: `Promise.all()` for multi-network -3. **Graceful Degradation**: Partial data > Error -4. **Two-Phase Market**: On-chain snapshot + API state -5. **Hybrid Caching**: Static JSON + dynamic API (oracles) +1. **Split Indexed Data From Metadata**: indexed state and metadata are fetched separately and recombined only at shared hook boundaries +2. **Envio First For Indexed Reads**: Morpho API is fallback-only for market/position/history state +3. **Cross-Chain First**: adapter boundaries accept multiple chains and own pagination internally +4. **Chain-Scoped Identity**: market and transaction identity uses `chainId + uniqueKey` +5. **RPC Loops Only Where Necessary**: snapshots and historical enrichments stay per chain +6. 
**Graceful Degradation**: fallback indexed data beats empty, and metadata failure must not blank primary state --- @@ -321,8 +327,9 @@ Fallback Strategy: ### APIs | Service | Endpoint | Purpose | |---------|----------|---------| -| Morpho API | `https://blue-api.morpho.org/graphql` | Markets, vaults, positions | -| The Graph | Per-chain subgraph URLs | Fallback data, suppliers, borrowers | +| Morpho API | `https://blue-api.morpho.org/graphql` | Market metadata, vaults, fallback positions/history | +| Envio / Monarch indexer | Configured via `NEXT_PUBLIC_ENVIO_INDEXER_ENDPOINT` | Indexed markets, positions, participants, transactions | +| The Graph | Per-chain subgraph URLs | Vault V2 / adapter discovery only | | Merkl API | `https://api.merkl.xyz` | Reward campaigns | | Velora API | `https://api.paraswap.io` | Swap quotes and executable tx payloads | | Alchemy | Per-chain RPC | Default RPC provider | diff --git a/src/config/dataSources.ts b/src/config/dataSources.ts index 07566ca6..27b36da0 100644 --- a/src/config/dataSources.ts +++ b/src/config/dataSources.ts @@ -1,29 +1,8 @@ -import { SupportedNetworks } from '@/utils/networks'; - type EnvioIndexerConfig = { endpoint: string; apiKey?: string; }; -/** - * Check if a network supports Morpho API as a data source - */ -export const supportsMorphoApi = (network: SupportedNetworks): boolean => { - switch (network) { - case SupportedNetworks.Mainnet: - case SupportedNetworks.Base: - case SupportedNetworks.Unichain: - case SupportedNetworks.Polygon: - case SupportedNetworks.Arbitrum: - case SupportedNetworks.HyperEVM: - case SupportedNetworks.Monad: - return true; - - default: - return false; - } -}; - const getTrimmedEnv = (value: string | undefined): string | undefined => { const trimmed = value?.trim(); return trimmed ? 
trimmed : undefined; diff --git a/src/data-sources/envio/transactions.ts b/src/data-sources/envio/transactions.ts index 1fe89faf..b091baec 100644 --- a/src/data-sources/envio/transactions.ts +++ b/src/data-sources/envio/transactions.ts @@ -20,14 +20,19 @@ const sortTransactionsByTimestampDescending = (transactions: UserTransaction[]): return transactions.sort((left, right) => right.timestamp - left.timestamp); }; +const resolveChainIds = (filters: TransactionFilters): number[] => { + return [...new Set(filters.chainIds ?? (filters.chainId != null ? [filters.chainId] : []))]; +}; + const buildAddressFilter = (addresses: string[]) => ({ _in: addresses.map((address) => address.toLowerCase()), }); const buildSharedWhereClause = (filters: TransactionFilters) => { + const chainIds = resolveChainIds(filters); const where: Record = { chainId: { - _eq: filters.chainId, + _in: chainIds, }, }; @@ -65,11 +70,9 @@ const buildLiquidationWhere = (filters: TransactionFilters) => ({ const matchesAssetFilter = async ({ assetIds, - chainId, transactions, }: { assetIds: string[] | undefined; - chainId: SupportedNetworks; transactions: UserTransaction[]; }): Promise => { if (!assetIds || assetIds.length === 0 || transactions.length === 0) { @@ -77,53 +80,62 @@ const matchesAssetFilter = async ({ } const normalizedAssetIds = new Set(assetIds.map((assetId) => assetId.toLowerCase())); - const uniqueMarketIds = [...new Set(transactions.map((transaction) => transaction.data.market.uniqueKey.toLowerCase()))]; - const envioMarketMap = await fetchEnvioMarketsByKeys( - uniqueMarketIds.map((marketId) => ({ - chainId, - marketUniqueKey: marketId, - })), - ).catch(() => new Map()); + const uniqueMarketRequests = new Map(); + + for (const transaction of transactions) { + uniqueMarketRequests.set(getChainScopedMarketKey(transaction.data.market.uniqueKey, transaction.chainId), { + chainId: transaction.chainId as SupportedNetworks, + marketUniqueKey: transaction.data.market.uniqueKey.toLowerCase(), + 
}); + } + + const envioMarketMap = await fetchEnvioMarketsByKeys(Array.from(uniqueMarketRequests.values())).catch(() => new Map()); const marketMap = new Map>>(); - const missingMarketIds: string[] = []; + const missingMarketRequests: { chainId: SupportedNetworks; marketUniqueKey: string }[] = []; - for (const marketId of uniqueMarketIds) { - const marketKey = getChainScopedMarketKey(marketId, chainId); + for (const marketRequest of uniqueMarketRequests.values()) { + const marketKey = getChainScopedMarketKey(marketRequest.marketUniqueKey, marketRequest.chainId); const envioMarket = envioMarketMap.get(marketKey); if (envioMarket) { - marketMap.set(marketId, envioMarket); + marketMap.set(marketKey, envioMarket); continue; } - missingMarketIds.push(marketId); + missingMarketRequests.push(marketRequest); } const fallbackResults = await Promise.allSettled( - missingMarketIds.map((marketId) => fetchMarketDetails(marketId, chainId, { enrichHistoricalApys: false })), + missingMarketRequests.map((marketRequest) => + fetchMarketDetails(marketRequest.marketUniqueKey, marketRequest.chainId, { enrichHistoricalApys: false }), + ), ); for (const [index, result] of fallbackResults.entries()) { if (result.status === 'fulfilled' && result.value) { - marketMap.set(missingMarketIds[index]!, result.value); + const marketRequest = missingMarketRequests[index]; + if (marketRequest) { + marketMap.set(getChainScopedMarketKey(marketRequest.marketUniqueKey, marketRequest.chainId), result.value); + } } } - if (marketMap.size !== uniqueMarketIds.length) { + if (marketMap.size !== uniqueMarketRequests.size) { throw new Error( - `Failed to hydrate ${uniqueMarketIds.length - marketMap.size} Envio transaction markets for asset filtering on chain ${chainId}`, + `Failed to hydrate ${uniqueMarketRequests.size - marketMap.size} Envio transaction markets for asset filtering`, ); } return transactions.filter((transaction) => { - const market = 
marketMap.get(transaction.data.market.uniqueKey.toLowerCase()); + const marketKey = getChainScopedMarketKey(transaction.data.market.uniqueKey, transaction.chainId); + const market = marketMap.get(marketKey); if (!market) { - throw new Error(`Missing hydrated market for Envio transaction ${transaction.hash} on chain ${chainId}`); + throw new Error(`Missing hydrated market for Envio transaction ${transaction.hash} on chain ${transaction.chainId}`); } const isCollateralTransaction = transaction.type === UserTxTypes.MarketSupplyCollateral || transaction.type === UserTxTypes.MarketWithdrawCollateral; const relevantAsset = isCollateralTransaction ? market.collateralAsset.address : market.loanAsset.address; - const canonicalAssetId = infoToKey(relevantAsset, chainId); + const canonicalAssetId = infoToKey(relevantAsset, transaction.chainId); return normalizedAssetIds.has(relevantAsset.toLowerCase()) || normalizedAssetIds.has(canonicalAssetId); }); @@ -131,6 +143,7 @@ const matchesAssetFilter = async ({ const toUserTransaction = ({ assets, + chainId, marketId, shares, timestamp, @@ -138,12 +151,14 @@ const toUserTransaction = ({ type, }: { assets: string | number; + chainId: number; marketId: string; shares?: string | number; timestamp: string | number; txHash: string; type: UserTxTypes; }): UserTransaction => ({ + chainId, data: { __typename: type, assets: normalizeEnvioString(assets), @@ -176,6 +191,7 @@ export const fetchEnvioTransactions = async (filters: TransactionFilters): Promi ...supplyEvents.map((event) => toUserTransaction({ assets: event.assets, + chainId: event.chainId, marketId: event.market_id, shares: event.shares, timestamp: event.timestamp, @@ -186,6 +202,7 @@ export const fetchEnvioTransactions = async (filters: TransactionFilters): Promi ...withdrawEvents.map((event) => toUserTransaction({ assets: event.assets, + chainId: event.chainId, marketId: event.market_id, shares: event.shares, timestamp: event.timestamp, @@ -196,6 +213,7 @@ export const 
fetchEnvioTransactions = async (filters: TransactionFilters): Promi ...borrowEvents.map((event) => toUserTransaction({ assets: event.assets, + chainId: event.chainId, marketId: event.market_id, shares: event.shares, timestamp: event.timestamp, @@ -206,6 +224,7 @@ export const fetchEnvioTransactions = async (filters: TransactionFilters): Promi ...repayEvents.map((event) => toUserTransaction({ assets: event.assets, + chainId: event.chainId, marketId: event.market_id, shares: event.shares, timestamp: event.timestamp, @@ -216,6 +235,7 @@ export const fetchEnvioTransactions = async (filters: TransactionFilters): Promi ...supplyCollateralEvents.map((event) => toUserTransaction({ assets: event.assets, + chainId: event.chainId, marketId: event.market_id, timestamp: event.timestamp, txHash: event.txHash, @@ -225,6 +245,7 @@ export const fetchEnvioTransactions = async (filters: TransactionFilters): Promi ...withdrawCollateralEvents.map((event) => toUserTransaction({ assets: event.assets, + chainId: event.chainId, marketId: event.market_id, timestamp: event.timestamp, txHash: event.txHash, @@ -234,6 +255,7 @@ export const fetchEnvioTransactions = async (filters: TransactionFilters): Promi ...liquidations.map((event) => toUserTransaction({ assets: event.repaidAssets, + chainId: event.chainId, marketId: event.market_id, shares: event.repaidShares, timestamp: event.timestamp, @@ -246,7 +268,6 @@ export const fetchEnvioTransactions = async (filters: TransactionFilters): Promi items = sortTransactionsByTimestampDescending(items); items = await matchesAssetFilter({ assetIds: filters.assetIds, - chainId: filters.chainId, transactions: items, }); diff --git a/src/data-sources/market-activity.ts b/src/data-sources/market-activity.ts index 723ebdb6..63374a78 100644 --- a/src/data-sources/market-activity.ts +++ b/src/data-sources/market-activity.ts @@ -1,4 +1,4 @@ -import { hasEnvioIndexer, supportsMorphoApi } from '@/config/dataSources'; +import { hasEnvioIndexer } from 
'@/config/dataSources'; import { fetchEnvioMarketBorrows, fetchEnvioMarketLiquidations, @@ -8,15 +8,11 @@ import { fetchMorphoMarketBorrows } from '@/data-sources/morpho-api/market-borro import { fetchMorphoMarketLiquidations } from '@/data-sources/morpho-api/market-liquidations'; import { fetchMorphoMarketSupplies } from '@/data-sources/morpho-api/market-supplies'; import { getErrorMessage, logDataSourceEvent } from '@/data-sources/shared/source-debug'; -import { fetchSubgraphMarketBorrows } from '@/data-sources/subgraph/market-borrows'; -import { fetchSubgraphMarketLiquidations } from '@/data-sources/subgraph/market-liquidations'; -import { fetchSubgraphMarketSupplies } from '@/data-sources/subgraph/market-supplies'; import type { SupportedNetworks } from '@/utils/networks'; import type { MarketLiquidationTransaction, PaginatedMarketActivityTransactions } from '@/utils/types'; export const fetchMarketSupplies = async ( marketId: string, - loanAssetId: string, network: SupportedNetworks, minAssets = '0', pageSize = 8, @@ -34,28 +30,24 @@ export const fetchMarketSupplies = async ( } } - if (supportsMorphoApi(network)) { - try { - return await fetchMorphoMarketSupplies(marketId, network, minAssets, pageSize, skip); - } catch (error) { - logDataSourceEvent('market-supplies', 'Morpho API supplies fetch failed, falling back to subgraph', { - chainId: network, - marketUniqueKey: marketId, - reason: getErrorMessage(error), - }); - } + try { + return await fetchMorphoMarketSupplies(marketId, network, minAssets, pageSize, skip); + } catch (error) { + logDataSourceEvent('market-supplies', 'Morpho API supplies fetch failed', { + chainId: network, + marketUniqueKey: marketId, + reason: getErrorMessage(error), + }); } - logDataSourceEvent('market-supplies', 'using subgraph fallback for supplies', { - chainId: network, - marketUniqueKey: marketId, - }); - return fetchSubgraphMarketSupplies(marketId, loanAssetId, network, minAssets, pageSize, skip); + return { + items: [], + 
totalCount: 0, + }; }; export const fetchMarketBorrows = async ( marketId: string, - loanAssetId: string, network: SupportedNetworks, minAssets = '0', pageSize = 8, @@ -73,23 +65,20 @@ export const fetchMarketBorrows = async ( } } - if (supportsMorphoApi(network)) { - try { - return await fetchMorphoMarketBorrows(marketId, network, minAssets, pageSize, skip); - } catch (error) { - logDataSourceEvent('market-borrows', 'Morpho API borrows fetch failed, falling back to subgraph', { - chainId: network, - marketUniqueKey: marketId, - reason: getErrorMessage(error), - }); - } + try { + return await fetchMorphoMarketBorrows(marketId, network, minAssets, pageSize, skip); + } catch (error) { + logDataSourceEvent('market-borrows', 'Morpho API borrows fetch failed', { + chainId: network, + marketUniqueKey: marketId, + reason: getErrorMessage(error), + }); } - logDataSourceEvent('market-borrows', 'using subgraph fallback for borrows', { - chainId: network, - marketUniqueKey: marketId, - }); - return fetchSubgraphMarketBorrows(marketId, loanAssetId, network, minAssets, pageSize, skip); + return { + items: [], + totalCount: 0, + }; }; export const fetchMarketLiquidations = async ( @@ -108,21 +97,15 @@ export const fetchMarketLiquidations = async ( } } - if (supportsMorphoApi(network)) { - try { - return await fetchMorphoMarketLiquidations(marketId, network); - } catch (error) { - logDataSourceEvent('market-liquidations', 'Morpho API liquidations fetch failed, falling back to subgraph', { - chainId: network, - marketUniqueKey: marketId, - reason: getErrorMessage(error), - }); - } + try { + return await fetchMorphoMarketLiquidations(marketId, network); + } catch (error) { + logDataSourceEvent('market-liquidations', 'Morpho API liquidations fetch failed', { + chainId: network, + marketUniqueKey: marketId, + reason: getErrorMessage(error), + }); } - logDataSourceEvent('market-liquidations', 'using subgraph fallback for liquidations', { - chainId: network, - marketUniqueKey: 
marketId, - }); - return fetchSubgraphMarketLiquidations(marketId, network); + return []; }; diff --git a/src/data-sources/market-catalog.ts b/src/data-sources/market-catalog.ts index 8f315e54..b9841cfb 100644 --- a/src/data-sources/market-catalog.ts +++ b/src/data-sources/market-catalog.ts @@ -1,12 +1,11 @@ -import { hasEnvioIndexer, supportsMorphoApi } from '@/config/dataSources'; +import { hasEnvioIndexer } from '@/config/dataSources'; import { fetchEnvioMarkets } from '@/data-sources/envio/market'; -import { fetchMorphoMarkets, fetchMorphoMarketsMultiChain } from '@/data-sources/morpho-api/market'; -import { mergeMarketsByIdentity } from '@/data-sources/shared/market-merge'; +import { fetchMorphoMarketsMultiChain } from '@/data-sources/morpho-api/market'; +import { toIndexedMarket } from '@/data-sources/shared/market-metadata'; import { filterTokenBlacklistedMarkets } from '@/data-sources/shared/market-visibility'; import { enrichMarketsWithHistoricalApysWithinTimeout } from '@/data-sources/shared/market-rate-enrichment'; import { enrichMarketsWithTargetRate } from '@/data-sources/shared/market-target-rate-enrichment'; import { getErrorMessage, logDataSourceEvent } from '@/data-sources/shared/source-debug'; -import { fetchSubgraphMarkets } from '@/data-sources/subgraph/market'; import type { CustomRpcUrls } from '@/stores/useCustomRpc'; import { ALL_SUPPORTED_NETWORKS, type SupportedNetworks } from '@/utils/networks'; import type { Market } from '@/utils/types'; @@ -57,43 +56,6 @@ const enrichCatalogMarketsWithLogging = async ( return enrichedMarkets; }; -const getMissingChainIds = (chainIds: SupportedNetworks[], markets: Market[]): SupportedNetworks[] => { - const coveredChainIds = new Set(markets.map((market) => market.morphoBlue.chain.id)); - return chainIds.filter((chainId) => !coveredChainIds.has(chainId)); -}; - -const fetchMarketsForNetwork = async (network: SupportedNetworks): Promise => { - logDataSourceEvent('market-catalog', 'fetching fallback 
markets for chain', { - chainId: network, - primary: supportsMorphoApi(network) ? 'morpho' : 'subgraph', - }); - - if (supportsMorphoApi(network)) { - try { - return await fetchMorphoMarkets(network); - } catch (morphoError) { - try { - return await fetchSubgraphMarkets(network); - } catch (subgraphError) { - throw new Error( - `Failed to fetch markets for chain ${network}: Morpho API failed (${getErrorMessage(morphoError)}); Subgraph failed (${getErrorMessage(subgraphError)})`, - ); - } - } - } - - try { - return await fetchSubgraphMarkets(network); - } catch (subgraphError) { - throw new Error(`Failed to fetch markets for chain ${network}: Subgraph failed (${getErrorMessage(subgraphError)})`); - } -}; - -const fetchMarketsPerNetworkFallback = async (chainIds: SupportedNetworks[]): Promise => { - const results = await Promise.all(chainIds.map((network) => fetchMarketsForNetwork(network))); - return filterTokenBlacklistedMarkets(results.flat()); -}; - export const fetchMarketCatalog = async ( chainIds: SupportedNetworks[] = ALL_SUPPORTED_NETWORKS, options: { @@ -101,107 +63,70 @@ export const fetchMarketCatalog = async ( } = {}, ): Promise => { const { customRpcUrls } = options; + let indexedMarkets: Market[] = []; + let source: 'envio' | 'morpho' = 'morpho'; if (hasEnvioIndexer()) { try { - logDataSourceEvent('market-catalog', 'fetching Envio market catalog', { - chainIds: chainIds.join(','), - }); - const envioFetchStartedAt = Date.now(); - const envioMarkets = await withTimeout( - fetchEnvioMarkets(chainIds, { - customRpcUrls, - }), - ENVIO_MARKET_CATALOG_TIMEOUT_MS, - 'Envio market catalog', + const envioMarkets = filterTokenBlacklistedMarkets( + (await withTimeout( + fetchEnvioMarkets(chainIds, { + customRpcUrls, + }), + ENVIO_MARKET_CATALOG_TIMEOUT_MS, + 'Envio market catalog', + )).map(toIndexedMarket), ); - const envioFetchDurationMs = Date.now() - envioFetchStartedAt; - const missingChainIds = getMissingChainIds(chainIds, envioMarkets); - if 
(missingChainIds.length === 0 && envioMarkets.length > 0) { - logDataSourceEvent('market-catalog', 'Envio fetch completed; using Envio as primary source', { + if (envioMarkets.length > 0) { + logDataSourceEvent('market-catalog', 'using Envio market catalog', { chainIds: chainIds.join(','), count: envioMarkets.length, - durationMs: envioFetchDurationMs, }); - return enrichCatalogMarketsWithLogging(envioMarkets, customRpcUrls, { + indexedMarkets = envioMarkets; + source = 'envio'; + } else { + logDataSourceEvent('market-catalog', 'Envio market catalog returned no usable markets, falling back', { chainIds: chainIds.join(','), - source: 'envio-primary', - }); - } - - logDataSourceEvent('market-catalog', 'Envio fetch completed with missing chains; falling back for those chains', { - requestedChainIds: chainIds.join(','), - coveredChainIds: [...new Set(envioMarkets.map((market) => market.morphoBlue.chain.id))].join(','), - missingChainIds: missingChainIds.join(','), - envioCount: envioMarkets.length, - durationMs: envioFetchDurationMs, - }); - - const fallbackMarkets = missingChainIds.length > 0 ? 
await fetchMarketsPerNetworkFallback(missingChainIds) : []; - const mergedMarkets = mergeMarketsByIdentity([...envioMarkets, ...fallbackMarkets]); - - if (mergedMarkets.length > 0) { - logDataSourceEvent('market-catalog', 'merged Envio with fallback markets', { - fallbackChainIds: missingChainIds.join(','), - fallbackCount: fallbackMarkets.length, - totalCount: mergedMarkets.length, - }); - - return enrichCatalogMarketsWithLogging(mergedMarkets, customRpcUrls, { - chainIds: chainIds.join(','), - source: 'envio-merged-fallback', }); } } catch (error) { - logDataSourceEvent('market-catalog', 'Envio market catalog failed, using legacy fallback', { + logDataSourceEvent('market-catalog', 'Envio market catalog failed', { chainIds: chainIds.join(','), reason: getErrorMessage(error), }); } } else { - logDataSourceEvent('market-catalog', 'Envio endpoint not configured, using legacy sources', { + logDataSourceEvent('market-catalog', 'Envio endpoint not configured, using Morpho market catalog only', { chainIds: chainIds.join(','), }); } - const morphoSupportedChainIds = chainIds.filter(supportsMorphoApi); - const subgraphOnlyChainIds = chainIds.filter((chainId) => !supportsMorphoApi(chainId)); - const markets: Market[] = []; + if (indexedMarkets.length === 0) { + const morphoFetchStartedAt = Date.now(); - if (morphoSupportedChainIds.length > 0) { try { - markets.push(...(await fetchMorphoMarketsMultiChain(morphoSupportedChainIds))); - logDataSourceEvent('market-catalog', 'used Morpho API fallback for supported chains', { - chainIds: morphoSupportedChainIds.join(','), + indexedMarkets = filterTokenBlacklistedMarkets((await fetchMorphoMarketsMultiChain(chainIds)).map(toIndexedMarket)); + logDataSourceEvent('market-catalog', 'using Morpho market catalog fallback', { + chainIds: chainIds.join(','), + count: indexedMarkets.length, + durationMs: Date.now() - morphoFetchStartedAt, }); } catch (error) { - logDataSourceEvent('market-catalog', 'Morpho multi-chain fallback failed, 
retrying per-network fallback', { - chainIds: morphoSupportedChainIds.join(','), + logDataSourceEvent('market-catalog', 'Morpho market catalog fetch failed', { + chainIds: chainIds.join(','), reason: getErrorMessage(error), }); - markets.push(...(await fetchMarketsPerNetworkFallback(morphoSupportedChainIds))); } } - if (subgraphOnlyChainIds.length > 0) { - const subgraphMarkets = await Promise.all(subgraphOnlyChainIds.map((network) => fetchSubgraphMarkets(network))); - markets.push(...filterTokenBlacklistedMarkets(subgraphMarkets.flat())); - - logDataSourceEvent('market-catalog', 'used subgraph fallback for non-Morpho chains', { - chainIds: subgraphOnlyChainIds.join(','), - }); - } - - const mergedMarkets = mergeMarketsByIdentity(markets); - - if (mergedMarkets.length > 0) { - return enrichCatalogMarketsWithLogging(mergedMarkets, customRpcUrls, { + if (indexedMarkets.length > 0) { + return enrichCatalogMarketsWithLogging(indexedMarkets, customRpcUrls, { chainIds: chainIds.join(','), - source: 'legacy-fallback', + source, }); } - return fetchMarketsPerNetworkFallback(chainIds); + throw new Error('Failed to fetch market catalog from Morpho API and Envio'); }; diff --git a/src/data-sources/market-details.ts b/src/data-sources/market-details.ts index 9f578597..1344d596 100644 --- a/src/data-sources/market-details.ts +++ b/src/data-sources/market-details.ts @@ -1,8 +1,8 @@ -import { hasEnvioIndexer, supportsMorphoApi } from '@/config/dataSources'; +import { hasEnvioIndexer } from '@/config/dataSources'; import { fetchEnvioMarket } from '@/data-sources/envio/market'; import { fetchMorphoMarket } from '@/data-sources/morpho-api/market'; +import { toIndexedMarket } from '@/data-sources/shared/market-metadata'; import { isTokenBlacklistedMarket } from '@/data-sources/shared/market-visibility'; -import { fetchSubgraphMarket } from '@/data-sources/subgraph/market'; import { enrichMarketsWithHistoricalApysWithinTimeout } from '@/data-sources/shared/market-rate-enrichment'; 
import { enrichMarketsWithTargetRate } from '@/data-sources/shared/market-target-rate-enrichment'; import { fillMissingMarketUsdValues } from '@/data-sources/shared/market-usd'; @@ -26,37 +26,24 @@ export const fetchMarketDetails = async ( if (hasEnvioIndexer()) { try { - baseMarket = await fetchEnvioMarket(uniqueKey, network, { + const envioMarket = await fetchEnvioMarket(uniqueKey, network, { customRpcUrls, }); - if (baseMarket) { + if (envioMarket) { + baseMarket = toIndexedMarket(envioMarket); logDataSourceEvent('market-details', 'using Envio market details', { chainId: network, marketUniqueKey: uniqueKey, }); - } - } catch (error) { - logDataSourceEvent('market-details', 'Envio market details failed, falling back', { - chainId: network, - marketUniqueKey: uniqueKey, - reason: getErrorMessage(error), - }); - } - } - - if (!baseMarket && supportsMorphoApi(network)) { - try { - baseMarket = await fetchMorphoMarket(uniqueKey, network); - - if (baseMarket) { - logDataSourceEvent('market-details', 'using Morpho API fallback for market details', { + } else { + logDataSourceEvent('market-details', 'Envio market details returned no result, falling back', { chainId: network, marketUniqueKey: uniqueKey, }); } } catch (error) { - logDataSourceEvent('market-details', 'Morpho market details failed, falling back to subgraph', { + logDataSourceEvent('market-details', 'Envio market details failed', { chainId: network, marketUniqueKey: uniqueKey, reason: getErrorMessage(error), @@ -66,16 +53,14 @@ export const fetchMarketDetails = async ( if (!baseMarket) { try { - baseMarket = await fetchSubgraphMarket(uniqueKey, network); - - if (baseMarket) { - logDataSourceEvent('market-details', 'using subgraph fallback for market details', { - chainId: network, - marketUniqueKey: uniqueKey, - }); - } + const morphoMarket = await fetchMorphoMarket(uniqueKey, network); + baseMarket = toIndexedMarket(morphoMarket); + logDataSourceEvent('market-details', 'using Morpho market details 
fallback', { + chainId: network, + marketUniqueKey: uniqueKey, + }); } catch (error) { - logDataSourceEvent('market-details', 'subgraph market details failed', { + logDataSourceEvent('market-details', 'Morpho market details failed', { chainId: network, marketUniqueKey: uniqueKey, reason: getErrorMessage(error), diff --git a/src/data-sources/market-historical.ts b/src/data-sources/market-historical.ts index 46a36d26..b8b93587 100644 --- a/src/data-sources/market-historical.ts +++ b/src/data-sources/market-historical.ts @@ -1,8 +1,7 @@ -import { hasEnvioIndexer, supportsMorphoApi } from '@/config/dataSources'; +import { hasEnvioIndexer } from '@/config/dataSources'; import { fetchEnvioMarketHistoricalData } from '@/data-sources/envio/historical'; import { fetchMorphoMarketHistoricalData, type HistoricalDataSuccessResult } from '@/data-sources/morpho-api/historical'; import { getErrorMessage, logDataSourceEvent } from '@/data-sources/shared/source-debug'; -import { fetchSubgraphMarketHistoricalData } from '@/data-sources/subgraph/historical'; import type { CustomRpcUrls } from '@/stores/useCustomRpc'; import type { SupportedNetworks } from '@/utils/networks'; import type { TimeseriesOptions } from '@/utils/types'; @@ -39,29 +38,23 @@ export const fetchMarketHistoricalData = async ( } } - if (supportsMorphoApi(network)) { - try { - const morphoData = await fetchMorphoMarketHistoricalData(uniqueKey, network, options); + try { + const morphoData = await fetchMorphoMarketHistoricalData(uniqueKey, network, options); - if (morphoData) { - logDataSourceEvent('market-historical', 'using Morpho API fallback for historical data', { - chainId: network, - marketUniqueKey: uniqueKey, - }); - return morphoData; - } - } catch (error) { - logDataSourceEvent('market-historical', 'Morpho historical fetch failed, falling back to subgraph', { + if (morphoData) { + logDataSourceEvent('market-historical', 'using Morpho API fallback for historical data', { chainId: network, marketUniqueKey: 
uniqueKey, - reason: getErrorMessage(error), }); + return morphoData; } + } catch (error) { + logDataSourceEvent('market-historical', 'Morpho historical fetch failed', { + chainId: network, + marketUniqueKey: uniqueKey, + reason: getErrorMessage(error), + }); } - logDataSourceEvent('market-historical', 'using subgraph fallback for historical data', { - chainId: network, - marketUniqueKey: uniqueKey, - }); - return fetchSubgraphMarketHistoricalData(uniqueKey, network, options); + return null; }; diff --git a/src/data-sources/market-participants.ts b/src/data-sources/market-participants.ts index 9db3da8d..b671b044 100644 --- a/src/data-sources/market-participants.ts +++ b/src/data-sources/market-participants.ts @@ -1,10 +1,8 @@ -import { hasEnvioIndexer, supportsMorphoApi } from '@/config/dataSources'; +import { hasEnvioIndexer } from '@/config/dataSources'; import { fetchEnvioMarketBorrowers, fetchEnvioMarketSuppliers } from '@/data-sources/envio/market-participants'; import { fetchMorphoMarketBorrowers } from '@/data-sources/morpho-api/market-borrowers'; import { fetchMorphoMarketSuppliers } from '@/data-sources/morpho-api/market-suppliers'; import { getErrorMessage, logDataSourceEvent } from '@/data-sources/shared/source-debug'; -import { fetchSubgraphMarketBorrowers } from '@/data-sources/subgraph/market-borrowers'; -import { fetchSubgraphMarketSuppliers } from '@/data-sources/subgraph/market-suppliers'; import type { SupportedNetworks } from '@/utils/networks'; import type { PaginatedMarketBorrowers, PaginatedMarketSuppliers } from '@/utils/types'; @@ -27,23 +25,20 @@ export const fetchMarketBorrowers = async ( } } - if (supportsMorphoApi(network)) { - try { - return await fetchMorphoMarketBorrowers(marketId, Number(network), minShares, pageSize, skip); - } catch (error) { - logDataSourceEvent('market-borrowers', 'Morpho API borrowers fetch failed, falling back to subgraph', { - chainId: network, - marketUniqueKey: marketId, - reason: getErrorMessage(error), - 
}); - } + try { + return await fetchMorphoMarketBorrowers(marketId, Number(network), minShares, pageSize, skip); + } catch (error) { + logDataSourceEvent('market-borrowers', 'Morpho API borrowers fetch failed', { + chainId: network, + marketUniqueKey: marketId, + reason: getErrorMessage(error), + }); } - logDataSourceEvent('market-borrowers', 'using subgraph fallback for borrowers', { - chainId: network, - marketUniqueKey: marketId, - }); - return fetchSubgraphMarketBorrowers(marketId, network, minShares, pageSize, skip); + return { + items: [], + totalCount: 0, + }; }; export const fetchMarketSuppliers = async ( @@ -65,21 +60,18 @@ export const fetchMarketSuppliers = async ( } } - if (supportsMorphoApi(network)) { - try { - return await fetchMorphoMarketSuppliers(marketId, Number(network), minShares, pageSize, skip); - } catch (error) { - logDataSourceEvent('market-suppliers', 'Morpho API suppliers fetch failed, falling back to subgraph', { - chainId: network, - marketUniqueKey: marketId, - reason: getErrorMessage(error), - }); - } + try { + return await fetchMorphoMarketSuppliers(marketId, Number(network), minShares, pageSize, skip); + } catch (error) { + logDataSourceEvent('market-suppliers', 'Morpho API suppliers fetch failed', { + chainId: network, + marketUniqueKey: marketId, + reason: getErrorMessage(error), + }); } - logDataSourceEvent('market-suppliers', 'using subgraph fallback for suppliers', { - chainId: network, - marketUniqueKey: marketId, - }); - return fetchSubgraphMarketSuppliers(marketId, network, minShares, pageSize, skip); + return { + items: [], + totalCount: 0, + }; }; diff --git a/src/data-sources/morpho-api/market-metadata.ts b/src/data-sources/morpho-api/market-metadata.ts new file mode 100644 index 00000000..536c2ea0 --- /dev/null +++ b/src/data-sources/morpho-api/market-metadata.ts @@ -0,0 +1,136 @@ +import { marketMetadataDetailQuery, marketsMetadataQuery } from '@/graphql/morpho-api-queries'; +import { getChainScopedMarketKey } from 
'@/utils/marketIdentity'; +import { ALL_SUPPORTED_NETWORKS, type SupportedNetworks } from '@/utils/networks'; +import type { MarketMetadata, MarketWarning } from '@/utils/types'; +import { morphoGraphqlFetcher } from './fetchers'; + +type MorphoApiMarketMetadata = { + uniqueKey: string; + morphoBlue: { + chain: { + id: SupportedNetworks; + }; + }; + warnings?: MarketWarning[] | null; + supplyingVaults?: { + address: string; + }[] | null; +}; + +type MarketMetadataGraphQLResponse = { + data?: { + marketByUniqueKey?: MorphoApiMarketMetadata | null; + }; +}; + +type MarketsMetadataGraphQLResponse = { + data?: { + markets?: { + items?: MorphoApiMarketMetadata[]; + pageInfo?: { + countTotal: number; + }; + }; + }; +}; + +type MorphoMarketMetadataPage = { + items: MarketMetadata[]; + totalCount: number; +}; + +const MORPHO_MARKET_METADATA_PAGE_SIZE = 500; +const MORPHO_MARKET_METADATA_PAGE_BATCH_SIZE = 4; +const MORPHO_MARKET_METADATA_TIMEOUT_MS = 20_000; + +const toMarketMetadata = (market: MorphoApiMarketMetadata): MarketMetadata => { + return { + uniqueKey: market.uniqueKey, + chainId: market.morphoBlue.chain.id, + warnings: market.warnings ?? [], + supplyingVaults: market.supplyingVaults ?? [], + }; +}; + +export const fetchMorphoMarketMetadata = async ( + uniqueKey: string, + chainId: SupportedNetworks, +): Promise => { + const response = await morphoGraphqlFetcher(marketMetadataDetailQuery, { + uniqueKey, + chainId, + }); + + return response?.data?.marketByUniqueKey ? 
toMarketMetadata(response.data.marketByUniqueKey) : null; +}; + +const fetchMorphoMarketsMetadataPageForChains = async ( + chainIds: SupportedNetworks[], + skip: number, + pageSize: number, +): Promise => { + const response = await morphoGraphqlFetcher( + marketsMetadataQuery, + { + first: pageSize, + skip, + where: { + chainId_in: chainIds, + }, + }, + { + timeoutMs: MORPHO_MARKET_METADATA_TIMEOUT_MS, + }, + ); + + if (!response?.data?.markets?.items || !response.data.markets.pageInfo) { + throw new Error(`Morpho market metadata page is incomplete at skip=${skip} for chains ${chainIds.join(',')}`); + } + + return { + items: response.data.markets.items.map(toMarketMetadata), + totalCount: response.data.markets.pageInfo.countTotal, + }; +}; + +export const fetchMorphoMarketsMetadataMultiChain = async ( + chainIds: SupportedNetworks[] = ALL_SUPPORTED_NETWORKS, +): Promise> => { + const metadataByKey = new Map(); + const firstPage = await fetchMorphoMarketsMetadataPageForChains(chainIds, 0, MORPHO_MARKET_METADATA_PAGE_SIZE); + + for (const metadata of firstPage.items) { + metadataByKey.set(getChainScopedMarketKey(metadata.uniqueKey, metadata.chainId), metadata); + } + + const firstPageCount = firstPage.items.length; + const totalCount = firstPage.totalCount; + + if (firstPageCount === 0 && totalCount > 0) { + throw new Error('Morpho market metadata first page returned zero items despite a positive total count.'); + } + + const remainingOffsets: number[] = []; + for (let nextSkip = firstPageCount; nextSkip < totalCount; nextSkip += MORPHO_MARKET_METADATA_PAGE_SIZE) { + remainingOffsets.push(nextSkip); + } + + for (let index = 0; index < remainingOffsets.length; index += MORPHO_MARKET_METADATA_PAGE_BATCH_SIZE) { + const offsetBatch = remainingOffsets.slice(index, index + MORPHO_MARKET_METADATA_PAGE_BATCH_SIZE); + const settledPages = await Promise.allSettled( + offsetBatch.map((skip) => fetchMorphoMarketsMetadataPageForChains(chainIds, skip, 
MORPHO_MARKET_METADATA_PAGE_SIZE)), + ); + + for (const settledPage of settledPages) { + if (settledPage.status === 'rejected') { + throw settledPage.reason; + } + + for (const metadata of settledPage.value.items) { + metadataByKey.set(getChainScopedMarketKey(metadata.uniqueKey, metadata.chainId), metadata); + } + } + } + + return metadataByKey; +}; diff --git a/src/data-sources/morpho-api/positions.ts b/src/data-sources/morpho-api/positions.ts index 7cafb8e8..5b9edca5 100644 --- a/src/data-sources/morpho-api/positions.ts +++ b/src/data-sources/morpho-api/positions.ts @@ -1,5 +1,5 @@ import { userPositionsQuery, userPositionForMarketQuery } from '@/graphql/morpho-api-queries'; -import type { SupportedNetworks } from '@/utils/networks'; +import { ALL_SUPPORTED_NETWORKS, type SupportedNetworks } from '@/utils/networks'; import type { MarketPosition } from '@/utils/types'; import { morphoGraphqlFetcher } from './fetchers'; @@ -35,11 +35,17 @@ type ValidMarketPosition = MarketPosition & { export const fetchMorphoUserPositionMarkets = async ( userAddress: string, network: SupportedNetworks, +): Promise<{ marketUniqueKey: string; chainId: number }[]> => { + return fetchMorphoUserPositionMarketsMultiChain(userAddress, [network]); +}; + +export const fetchMorphoUserPositionMarketsMultiChain = async ( + userAddress: string, + chainIds: SupportedNetworks[] = ALL_SUPPORTED_NETWORKS, ): Promise<{ marketUniqueKey: string; chainId: number }[]> => { try { const result = await morphoGraphqlFetcher(userPositionsQuery, { address: userAddress.toLowerCase(), - chainId: network, }); // Handle NOT_FOUND - return empty array @@ -48,12 +54,15 @@ export const fetchMorphoUserPositionMarkets = async ( } const marketPositions = result.data?.userByAddress?.marketPositions ?? 
[]; + const chainIdSet = new Set(chainIds); // Filter for valid positions and extract market key and chain ID const positionMarkets = marketPositions .filter( (position): position is ValidMarketPosition => - position.market?.uniqueKey !== undefined && position.market?.morphoBlue?.chain?.id !== undefined, + position.market?.uniqueKey !== undefined && + position.market?.morphoBlue?.chain?.id !== undefined && + chainIdSet.has(position.market.morphoBlue.chain.id as SupportedNetworks), ) .map((position) => ({ marketUniqueKey: position.market.uniqueKey, @@ -62,7 +71,7 @@ export const fetchMorphoUserPositionMarkets = async ( return positionMarkets; } catch (error) { - console.error(`Failed to fetch position markets from Morpho API for ${userAddress} on ${network}:`, error); + console.error(`Failed to fetch cross-chain position markets from Morpho API for ${userAddress}:`, error); throw error; // Re-throw to allow caller to handle fallback } }; diff --git a/src/data-sources/morpho-api/transactions.ts b/src/data-sources/morpho-api/transactions.ts index ef54247e..5c4d5dd7 100644 --- a/src/data-sources/morpho-api/transactions.ts +++ b/src/data-sources/morpho-api/transactions.ts @@ -1,20 +1,75 @@ import { userTransactionsQuery } from '@/graphql/morpho-api-queries'; import type { TransactionFilters, TransactionResponse } from '@/hooks/queries/fetchUserTransactions'; +import { UserTxTypes, type UserTransaction } from '@/utils/types'; import { morphoGraphqlFetcher } from './fetchers'; // Define the expected shape of the GraphQL response for transactions +type MorphoTransactionItem = Omit & { + data?: { + __typename?: UserTxTypes; + assets?: string; + shares?: string; + market?: { + uniqueKey?: string; + morphoBlue?: { + chain?: { + id?: number; + }; + }; + }; + }; +}; + type MorphoTransactionsApiResponse = { data?: { - transactions?: TransactionResponse; + transactions?: Omit & { + items: MorphoTransactionItem[]; + }; }; // errors are handled by the fetcher }; +const 
resolveChainIds = (filters: TransactionFilters): number[] => { + return [...new Set(filters.chainIds ?? (filters.chainId != null ? [filters.chainId] : []))]; +}; + +const normalizeMorphoTransaction = ( + transaction: MorphoTransactionItem, + fallbackChainIds: number[], +): UserTransaction | null => { + const marketUniqueKey = transaction.data?.market?.uniqueKey; + if (!marketUniqueKey) { + return null; + } + + const chainId = transaction.data?.market?.morphoBlue?.chain?.id ?? (fallbackChainIds.length === 1 ? fallbackChainIds[0] : undefined); + if (chainId == null) { + return null; + } + + return { + chainId, + hash: transaction.hash, + timestamp: transaction.timestamp, + type: transaction.type, + data: { + __typename: transaction.data?.__typename ?? transaction.type, + assets: transaction.data?.assets ?? '0', + shares: transaction.data?.shares ?? '0', + market: { + uniqueKey: marketUniqueKey, + }, + }, + }; +}; + export const fetchMorphoTransactions = async (filters: TransactionFilters): Promise => { + const chainIds = resolveChainIds(filters); + // Conditionally construct the 'where' object const whereClause: Record = { userAddress_in: filters.userAddress, // Assuming this is always required - chainId_in: [filters.chainId], + chainId_in: chainIds, }; if (filters.marketUniqueKeys && filters.marketUniqueKeys.length > 0) { @@ -58,7 +113,15 @@ export const fetchMorphoTransactions = async (filters: TransactionFilters): Prom }; } - return transactions; + const normalizedItems = transactions.items + .map((transaction) => normalizeMorphoTransaction(transaction, chainIds)) + .filter((transaction): transaction is UserTransaction => transaction !== null); + + return { + error: transactions.error, + items: normalizedItems, + pageInfo: transactions.pageInfo, + }; } catch (err) { console.error('Error fetching Morpho API transactions:', err); return { diff --git a/src/data-sources/position-markets.ts b/src/data-sources/position-markets.ts index 8aaf23d1..c34b55b4 100644 --- 
a/src/data-sources/position-markets.ts +++ b/src/data-sources/position-markets.ts @@ -1,8 +1,7 @@ -import { hasEnvioIndexer, supportsMorphoApi } from '@/config/dataSources'; +import { hasEnvioIndexer } from '@/config/dataSources'; import { fetchEnvioUserPositionMarkets } from '@/data-sources/envio/positions'; -import { fetchMorphoUserPositionMarkets } from '@/data-sources/morpho-api/positions'; +import { fetchMorphoUserPositionMarketsMultiChain } from '@/data-sources/morpho-api/positions'; import { getErrorMessage, logDataSourceEvent } from '@/data-sources/shared/source-debug'; -import { fetchSubgraphUserPositionMarkets } from '@/data-sources/subgraph/positions'; import { getChainScopedMarketKey } from '@/utils/marketIdentity'; import { ALL_SUPPORTED_NETWORKS, type SupportedNetworks } from '@/utils/networks'; @@ -44,27 +43,6 @@ const dedupePositionMarkets = (markets: PositionMarket[]): PositionMarket[] => { return Array.from(uniqueMarkets.values()); }; -const fetchPositionMarketsPerNetworkFallback = async ( - user: string, - chainIds: SupportedNetworks[], -): Promise => { - const results = await Promise.allSettled( - chainIds.map(async (network) => { - if (supportsMorphoApi(network)) { - try { - return await fetchMorphoUserPositionMarkets(user, network); - } catch { - return fetchSubgraphUserPositionMarkets(user, network); - } - } - - return fetchSubgraphUserPositionMarkets(user, network); - }), - ); - - return dedupePositionMarkets(results.flatMap((result) => (result.status === 'fulfilled' ? 
result.value : []))); -}; - export const fetchUserPositionMarkets = async ( user: string, chainIds: SupportedNetworks[] = ALL_SUPPORTED_NETWORKS, @@ -96,8 +74,8 @@ export const fetchUserPositionMarkets = async ( } } - logDataSourceEvent('position-markets', 'using per-network position discovery fallback', { + logDataSourceEvent('position-markets', 'using Morpho cross-chain position discovery fallback', { chainIds: chainIds.join(','), }); - return fetchPositionMarketsPerNetworkFallback(user, chainIds); + return dedupePositionMarkets(await fetchMorphoUserPositionMarketsMultiChain(user, chainIds)); }; diff --git a/src/data-sources/shared/market-merge.ts b/src/data-sources/shared/market-merge.ts deleted file mode 100644 index ba748436..00000000 --- a/src/data-sources/shared/market-merge.ts +++ /dev/null @@ -1,50 +0,0 @@ -import { getChainScopedMarketKey } from '@/utils/marketIdentity'; -import type { Market } from '@/utils/types'; - -export const mergeMarketState = (baseMarket: Market, overlayMarket: Market): Market => { - return { - ...baseMarket, - lltv: overlayMarket.lltv || baseMarket.lltv, - irmAddress: overlayMarket.irmAddress || baseMarket.irmAddress, - oracleAddress: overlayMarket.oracleAddress || baseMarket.oracleAddress, - state: { - ...baseMarket.state, - borrowAssets: overlayMarket.state.borrowAssets, - supplyAssets: overlayMarket.state.supplyAssets, - borrowShares: overlayMarket.state.borrowShares, - supplyShares: overlayMarket.state.supplyShares, - liquidityAssets: overlayMarket.state.liquidityAssets, - utilization: overlayMarket.state.utilization, - supplyApy: overlayMarket.state.supplyApy, - borrowApy: overlayMarket.state.borrowApy, - fee: overlayMarket.state.fee, - timestamp: overlayMarket.state.timestamp, - apyAtTarget: overlayMarket.state.apyAtTarget, - rateAtTarget: overlayMarket.state.rateAtTarget, - dailySupplyApy: overlayMarket.state.dailySupplyApy ?? baseMarket.state.dailySupplyApy, - dailyBorrowApy: overlayMarket.state.dailyBorrowApy ?? 
baseMarket.state.dailyBorrowApy, - weeklySupplyApy: overlayMarket.state.weeklySupplyApy ?? baseMarket.state.weeklySupplyApy, - weeklyBorrowApy: overlayMarket.state.weeklyBorrowApy ?? baseMarket.state.weeklyBorrowApy, - monthlySupplyApy: overlayMarket.state.monthlySupplyApy ?? baseMarket.state.monthlySupplyApy, - monthlyBorrowApy: overlayMarket.state.monthlyBorrowApy ?? baseMarket.state.monthlyBorrowApy, - }, - }; -}; - -export const mergeMarketsByIdentity = (markets: Market[]): Market[] => { - const merged = new Map(); - - for (const market of markets) { - const key = getChainScopedMarketKey(market.uniqueKey, market.morphoBlue.chain.id); - const existing = merged.get(key); - - if (!existing) { - merged.set(key, market); - continue; - } - - merged.set(key, mergeMarketState(existing, market)); - } - - return Array.from(merged.values()); -}; diff --git a/src/data-sources/shared/market-metadata.ts b/src/data-sources/shared/market-metadata.ts new file mode 100644 index 00000000..6643ef81 --- /dev/null +++ b/src/data-sources/shared/market-metadata.ts @@ -0,0 +1,35 @@ +import { getChainScopedMarketKey } from '@/utils/marketIdentity'; +import type { Market, MarketMetadata } from '@/utils/types'; + +export const toIndexedMarket = (market: Market): Market => { + return { + ...market, + warnings: [], + supplyingVaults: [], + }; +}; + +export const applyMarketMetadata = (market: Market, metadata: MarketMetadata | null | undefined): Market => { + if (!metadata) { + return market; + } + + return { + ...market, + warnings: metadata.warnings, + supplyingVaults: metadata.supplyingVaults, + }; +}; + +export const applyMarketMetadataMap = (markets: Market[], metadataMap: ReadonlyMap): Market[] => { + if (markets.length === 0 || metadataMap.size === 0) { + return markets; + } + + return markets.map((market) => + applyMarketMetadata( + market, + metadataMap.get(getChainScopedMarketKey(market.uniqueKey, market.morphoBlue.chain.id)), + ), + ); +}; diff --git 
a/src/data-sources/subgraph/historical.ts b/src/data-sources/subgraph/historical.ts deleted file mode 100644 index 62cb5d79..00000000 --- a/src/data-sources/subgraph/historical.ts +++ /dev/null @@ -1,164 +0,0 @@ -import { marketHourlySnapshotsQuery } from '@/graphql/morpho-subgraph-queries'; -import type { SupportedNetworks } from '@/utils/networks'; -import { getSubgraphUrl } from '@/utils/subgraph-urls'; -import type { TimeseriesOptions, TimeseriesDataPoint, MarketRates, MarketVolumes } from '@/utils/types'; -import type { HistoricalDataSuccessResult } from '../morpho-api/historical'; -import { subgraphGraphqlFetcher } from './fetchers'; - -// --- Subgraph Specific Types (Copied from useSubgraphMarketHistoricalData.ts) --- -type SubgraphInterestRate = { - id: string; - rate: string; - side: 'LENDER' | 'BORROWER'; - type: 'VARIABLE' | 'STABLE' | 'FIXED'; -}; - -type SubgraphMarketHourlySnapshot = { - id: string; - timestamp: string; - market: { - id: string; - }; - rates: SubgraphInterestRate[]; - totalDepositBalanceUSD: string; - totalBorrowBalanceUSD: string; - inputTokenBalance: string; - inputTokenPriceUSD: string; - hourlyDepositUSD: string; - hourlyBorrowUSD: string; - outputTokenSupply: string | null; - variableBorrowedTokenBalance: string | null; -}; - -type SubgraphMarketHourlySnapshotQueryResponse = { - data: { - marketHourlySnapshots: SubgraphMarketHourlySnapshot[]; - }; -}; -// --- End Subgraph Specific Types --- - -const safeParseFloat = (value: string | null | undefined): number => { - if (!value) { - return 0; - } - - const parsed = Number.parseFloat(value); - return Number.isFinite(parsed) ? 
parsed : 0; -}; -const UTILIZATION_SCALE = 1_000_000n; - -// Transformation function (simplified) -const transformSubgraphSnapshotsToHistoricalResult = ( - snapshots: SubgraphMarketHourlySnapshot[], // Expect non-empty array here -): HistoricalDataSuccessResult => { - const rates: MarketRates = { - supplyApy: [] as TimeseriesDataPoint[], - borrowApy: [] as TimeseriesDataPoint[], - apyAtTarget: [] as TimeseriesDataPoint[], - utilization: [] as TimeseriesDataPoint[], - }; - const volumes: MarketVolumes = { - supplyAssetsUsd: [] as TimeseriesDataPoint[], - borrowAssetsUsd: [] as TimeseriesDataPoint[], - liquidityAssetsUsd: [] as TimeseriesDataPoint[], - supplyAssets: [] as TimeseriesDataPoint[], - borrowAssets: [] as TimeseriesDataPoint[], - liquidityAssets: [] as TimeseriesDataPoint[], - }; - - // No need to check for !snapshots here, handled by caller - snapshots.forEach((snapshot) => { - const timestamp = Number.parseInt(snapshot.timestamp, 10); - if (isNaN(timestamp)) { - console.warn('Skipping snapshot due to invalid timestamp:', snapshot); - return; - } - - const snapshotRates = Array.isArray(snapshot.rates) ? snapshot.rates : []; - const supplyRate = snapshotRates.find((r) => r?.side === 'LENDER'); - const borrowRate = snapshotRates.find((r) => r?.side === 'BORROWER'); - - const supplyApyValue = supplyRate?.rate ? Number.parseFloat(supplyRate.rate) : 0; - const borrowApyValue = borrowRate?.rate ? Number.parseFloat(borrowRate.rate) : 0; - - rates.supplyApy.push({ - x: timestamp, - y: isNaN(supplyApyValue) ? 0 : supplyApyValue, - }); - rates.borrowApy.push({ - x: timestamp, - y: isNaN(borrowApyValue) ? 0 : borrowApyValue, - }); - const supplyAssetsUsd = safeParseFloat(snapshot.totalDepositBalanceUSD); - const borrowAssetsUsd = safeParseFloat(snapshot.totalBorrowBalanceUSD); - const liquidityAssetsUsd = Math.max(0, supplyAssetsUsd - borrowAssetsUsd); - - const supplyNative = BigInt(snapshot.inputTokenBalance ?? 
'0'); - const borrowNative = BigInt(snapshot.variableBorrowedTokenBalance ?? '0'); - const liquidityNative = supplyNative - borrowNative; - const utilization = - supplyNative > 0n ? Number((borrowNative * UTILIZATION_SCALE) / supplyNative) / Number(UTILIZATION_SCALE) : 0; - - rates.utilization.push({ x: timestamp, y: utilization }); - - volumes.supplyAssetsUsd.push({ x: timestamp, y: supplyAssetsUsd }); - volumes.borrowAssetsUsd.push({ x: timestamp, y: borrowAssetsUsd }); - volumes.liquidityAssetsUsd.push({ x: timestamp, y: liquidityAssetsUsd }); - - volumes.supplyAssets.push({ x: timestamp, y: Number(supplyNative) }); - volumes.borrowAssets.push({ x: timestamp, y: Number(borrowNative) }); - volumes.liquidityAssets.push({ x: timestamp, y: Number(liquidityNative) }); - }); - - // Sort data by timestamp - Object.values(rates).forEach((arr: TimeseriesDataPoint[]) => arr.sort((a: TimeseriesDataPoint, b: TimeseriesDataPoint) => a.x - b.x)); - Object.values(volumes).forEach((arr: TimeseriesDataPoint[]) => arr.sort((a: TimeseriesDataPoint, b: TimeseriesDataPoint) => a.x - b.x)); - - return { rates, volumes }; -}; - -// Fetcher function for Subgraph historical data -export const fetchSubgraphMarketHistoricalData = async ( - marketId: string, - network: SupportedNetworks, - timeRange: TimeseriesOptions, -): Promise => { - // Updated return type - - if (!timeRange.startTimestamp || !timeRange.endTimestamp) { - console.warn('Subgraph historical fetch requires start and end timestamps.'); - return null; // Return null - } - - const subgraphApiUrl = getSubgraphUrl(network); - if (!subgraphApiUrl) { - console.warn(`Subgraph URL for network ${network} is not defined. 
Cannot fetch historical data.`); - return null; // Return null - } - - try { - const variables = { - marketId: marketId.toLowerCase(), - startTimestamp: String(timeRange.startTimestamp), - endTimestamp: String(timeRange.endTimestamp), - }; - - const response = await subgraphGraphqlFetcher( - subgraphApiUrl, - marketHourlySnapshotsQuery, - variables, - ); - - // If no data or empty snapshots array, return null - if (!response?.data || !response.data.marketHourlySnapshots || response.data.marketHourlySnapshots.length === 0) { - console.warn(`No subgraph historical snapshots found for market ${marketId}`); - return null; - } - - // Pass the guaranteed non-empty array to the transformer - return transformSubgraphSnapshotsToHistoricalResult(response.data.marketHourlySnapshots); - } catch (error) { - console.error('Error fetching or processing subgraph historical data:', error); - return null; // Return null on error - } -}; diff --git a/src/data-sources/subgraph/market-borrowers.ts b/src/data-sources/subgraph/market-borrowers.ts deleted file mode 100644 index f03be539..00000000 --- a/src/data-sources/subgraph/market-borrowers.ts +++ /dev/null @@ -1,145 +0,0 @@ -import { marketBorrowersQuery } from '@/graphql/morpho-subgraph-queries'; -import type { SupportedNetworks } from '@/utils/networks'; -import { getSubgraphUrl } from '@/utils/subgraph-urls'; -import type { MarketBorrower, PaginatedMarketBorrowers } from '@/utils/types'; -import { subgraphGraphqlFetcher } from './fetchers'; - -// Type for the Subgraph response -type SubgraphBorrowerItem = { - shares: string; - account: { - id: string; - positions: { - balance: string; - }[]; - }; -}; - -type SubgraphBorrowersResponse = { - data?: { - market?: { - totalBorrow: string; - totalBorrowShares: string; - }; - positions?: SubgraphBorrowerItem[]; - }; -}; - -// In-memory cache for subgraph data (avoids refetching 1000 items on page change) -type CacheEntry = { - data: MarketBorrower[]; - timestamp: number; -}; - -const 
borrowersCache = new Map(); -const CACHE_TTL = 2 * 60 * 1000; // 2 minutes (same as React Query staleTime) - -function getCacheKey(marketId: string, network: SupportedNetworks, minShares: string): string { - return `${network}-${marketId}-${minShares}`; -} - -/** - * Fetches current market borrowers (positions) from the Subgraph. - * Uses adapter pattern: Always fetches top 1000 items and performs client-side pagination. - * Returns borrowers with their collateral balances. - * This approach keeps the interface identical to Morpho API while working within subgraph limits. - * - * @param marketId The ID of the market (unique key). - * @param network The blockchain network. - * @param minShares Minimum borrow share amount to filter borrowers (optional, defaults to '0'). - * @param pageSize Number of items to return per page (optional, defaults to 10). - * @param skip Number of items to skip for pagination (optional, defaults to 0). - * @returns A promise resolving to paginated MarketBorrower objects. - */ -export const fetchSubgraphMarketBorrowers = async ( - marketId: string, - network: SupportedNetworks, - minShares = '0', - pageSize = 10, - skip = 0, -): Promise => { - const subgraphUrl = getSubgraphUrl(network); - if (!subgraphUrl) { - console.warn(`No Subgraph URL configured for network: ${network}. 
Returning empty results.`); - return { items: [], totalCount: 0 }; - } - - const cacheKey = getCacheKey(marketId, network, minShares); - const now = Date.now(); - - // Check cache first - const cached = borrowersCache.get(cacheKey); - let allMappedItems: MarketBorrower[]; - - if (cached && now - cached.timestamp < CACHE_TTL) { - // Use cached data - allMappedItems = cached.data; - console.log(`Using cached borrowers data for ${marketId} (${allMappedItems.length} items)`); - } else { - // Fetch fresh data - always fetch top 1000 items (subgraph limit) - const variables = { - market: marketId, - minShares, - first: 1000, - skip: 0, - }; - - try { - const result = await subgraphGraphqlFetcher(subgraphUrl, marketBorrowersQuery, variables); - - const positions = result.data?.positions ?? []; - const market = result.data?.market; - - // Get market totals for share-to-asset conversion - const totalBorrow = BigInt(market?.totalBorrow ?? '0'); - const totalBorrowShares = BigInt(market?.totalBorrowShares ?? '0'); - - // Map all items to unified type - allMappedItems = positions.map((position) => { - // Convert borrow shares to borrow assets - // borrowAssets = (shares * totalBorrow) / totalBorrowShares - const shares = BigInt(position.shares); - let borrowAssets = '0'; - - if (totalBorrowShares > 0n) { - const assets = (shares * totalBorrow) / totalBorrowShares; - borrowAssets = assets.toString(); - } - - // Get collateral balance from nested positions (should be exactly 1) - const collateralBalance = position.account.positions[0]?.balance ?? 
'0'; - - return { - userAddress: position.account.id, - borrowAssets, - collateral: collateralBalance, - }; - }); - - // Update cache - borrowersCache.set(cacheKey, { - data: allMappedItems, - timestamp: now, - }); - - console.log(`Fetched and cached ${allMappedItems.length} borrowers for ${marketId}`); - } catch (error) { - console.error(`Error fetching or processing Subgraph market borrowers for ${marketId}:`, error); - if (error instanceof Error) { - throw error; - } - throw new Error('An unknown error occurred while fetching subgraph market borrowers'); - } - } - - // Perform client-side pagination by slicing the results - const start = skip; - const end = skip + pageSize; - const paginatedItems = allMappedItems.slice(start, end); - - // Return with actual total count (capped at 1000 by subgraph) - return { - items: paginatedItems, - totalCount: allMappedItems.length, - }; -}; diff --git a/src/data-sources/subgraph/market-borrows.ts b/src/data-sources/subgraph/market-borrows.ts deleted file mode 100644 index 2e5d7a7a..00000000 --- a/src/data-sources/subgraph/market-borrows.ts +++ /dev/null @@ -1,104 +0,0 @@ -import { marketBorrowsRepaysQuery } from '@/graphql/morpho-subgraph-queries'; -import type { SupportedNetworks } from '@/utils/networks'; -import { getSubgraphUrl } from '@/utils/subgraph-urls'; -import type { MarketActivityTransaction, PaginatedMarketActivityTransactions } from '@/utils/types'; // Import shared type -import { subgraphGraphqlFetcher } from './fetchers'; - -// Types specific to the Subgraph response for this query -type SubgraphBorrowRepayItem = { - amount: string; - account: { - id: string; - }; - timestamp: number | string; - hash: string; -}; - -type SubgraphBorrowsRepaysResponse = { - data?: { - borrows?: SubgraphBorrowRepayItem[]; - repays?: SubgraphBorrowRepayItem[]; - }; -}; - -/** - * Fetches market borrow/repay activities from the Subgraph. 
- * NOTE: Because borrows and repays are fetched separately and merged client-side, - * we cannot do proper server-side pagination. Instead, we fetch a large batch (200 items) - * from both sources, merge and sort them, then apply client-side pagination. - * This ensures correct ordering and prevents skipped items. - * @param marketId The ID of the market. - * @param loanAssetId The address of the loan asset. - * @param network The blockchain network. - * @param minAssets Minimum asset amount to filter transactions (optional, defaults to 0). - * @param first Number of items to return per page (optional, defaults to 8). - * @param skip Number of items to skip for pagination (optional, defaults to 0). - * @returns A promise resolving to paginated MarketActivityTransaction objects. - */ -export const fetchSubgraphMarketBorrows = async ( - marketId: string, - loanAssetId: string, - network: SupportedNetworks, - minAssets = '0', - first = 8, - skip = 0, -): Promise => { - const subgraphUrl = getSubgraphUrl(network); - if (!subgraphUrl) { - console.warn(`No Subgraph URL configured for network: ${network}. Returning empty results.`); - return { items: [], totalCount: 0 }; - } - - const fetchBatchSize = 200; - - const variables = { - marketId, - loanAssetId, - minAssets, - first: fetchBatchSize, - skip: 0, - }; - - try { - const result = await subgraphGraphqlFetcher(subgraphUrl, marketBorrowsRepaysQuery, variables); - - const borrows = result.data?.borrows ?? []; - const repays = result.data?.repays ?? []; - - const mappedBorrows: MarketActivityTransaction[] = borrows.map((b) => ({ - type: 'MarketBorrow', - hash: b.hash, - timestamp: typeof b.timestamp === 'string' ? Number.parseInt(b.timestamp, 10) : b.timestamp, - amount: b.amount, - userAddress: b.account.id, - })); - - const mappedRepays: MarketActivityTransaction[] = repays.map((r) => ({ - type: 'MarketRepay', - hash: r.hash, - timestamp: typeof r.timestamp === 'string' ? 
Number.parseInt(r.timestamp, 10) : r.timestamp, - amount: r.amount, - userAddress: r.account.id, - })); - - // Merge and sort by timestamp, then apply client-side pagination - const combined = [...mappedBorrows, ...mappedRepays]; - combined.sort((a, b) => b.timestamp - a.timestamp); - - const startIndex = skip; - const endIndex = skip + first; - const items = combined.slice(startIndex, endIndex); - const totalCount = combined.length; - - return { - items, - totalCount, - }; - } catch (error) { - console.error(`Error fetching or processing Subgraph market borrows for ${marketId}:`, error); - if (error instanceof Error) { - throw error; - } - throw new Error('An unknown error occurred while fetching subgraph market borrows'); - } -}; diff --git a/src/data-sources/subgraph/market-liquidations.ts b/src/data-sources/subgraph/market-liquidations.ts deleted file mode 100644 index 3880d328..00000000 --- a/src/data-sources/subgraph/market-liquidations.ts +++ /dev/null @@ -1,84 +0,0 @@ -import { marketLiquidationsAndBadDebtQuery } from '@/graphql/morpho-subgraph-queries'; -import type { SupportedNetworks } from '@/utils/networks'; -import { getSubgraphUrl } from '@/utils/subgraph-urls'; -import type { MarketLiquidationTransaction } from '@/utils/types'; // Import simplified type -import { subgraphGraphqlFetcher } from './fetchers'; - -// Types specific to the Subgraph response items -type SubgraphLiquidateItem = { - id: string; - hash: string; - timestamp: number | string; - repaid: string; - amount: string; - liquidator: { - id: string; - }; -}; - -type SubgraphBadDebtItem = { - badDebt: string; - liquidation: { - id: string; - }; -}; - -// Type for the overall Subgraph response -type SubgraphLiquidationsResponse = { - data?: { - liquidates?: SubgraphLiquidateItem[]; - badDebtRealizations?: SubgraphBadDebtItem[]; - }; -}; - -/** - * Fetches market liquidation activities from the Subgraph. - * Combines liquidation events with associated bad debt realizations. 
- * @param marketId The ID of the market. - * @param network The blockchain network. - * @returns A promise resolving to an array of simplified MarketLiquidationTransaction objects. - */ -export const fetchSubgraphMarketLiquidations = async ( - marketId: string, - network: SupportedNetworks, -): Promise => { - const subgraphUrl = getSubgraphUrl(network); - if (!subgraphUrl) { - console.warn(`No Subgraph URL configured for network: ${network}. Returning empty results.`); - return []; - } - - const variables = { marketId }; - - try { - const result = await subgraphGraphqlFetcher(subgraphUrl, marketLiquidationsAndBadDebtQuery, variables); - - const liquidates = result.data?.liquidates ?? []; - const badDebtItems = result.data?.badDebtRealizations ?? []; - - // Create a map for quick lookup of bad debt by liquidation ID - const badDebtMap = new Map(); - badDebtItems.forEach((item) => { - badDebtMap.set(item.liquidation.id, item.badDebt); - }); - - // Map liquidations, adding bad debt information - return liquidates.map((liq) => ({ - type: 'MarketLiquidation', - hash: liq.hash, - timestamp: typeof liq.timestamp === 'string' ? Number.parseInt(liq.timestamp, 10) : liq.timestamp, - // Subgraph query doesn't provide liquidator, use empty string or default - liquidator: liq.liquidator.id, - repaidAssets: liq.repaid, // Loan asset repaid - seizedAssets: liq.amount, // Collateral seized - // Fetch bad debt from the map using the liquidate event ID - badDebtAssets: badDebtMap.get(liq.id) ?? 
'0', // Default to '0' if no bad debt entry - })); - } catch (error) { - console.error(`Error fetching or processing Subgraph market liquidations for ${marketId}:`, error); - if (error instanceof Error) { - throw error; - } - throw new Error('An unknown error occurred while fetching subgraph market liquidations'); - } -}; diff --git a/src/data-sources/subgraph/market-suppliers.ts b/src/data-sources/subgraph/market-suppliers.ts deleted file mode 100644 index 9558e13a..00000000 --- a/src/data-sources/subgraph/market-suppliers.ts +++ /dev/null @@ -1,116 +0,0 @@ -import { marketSuppliersQuery } from '@/graphql/morpho-subgraph-queries'; -import type { SupportedNetworks } from '@/utils/networks'; -import { getSubgraphUrl } from '@/utils/subgraph-urls'; -import type { MarketSupplier, PaginatedMarketSuppliers } from '@/utils/types'; -import { subgraphGraphqlFetcher } from './fetchers'; - -// Type for the Subgraph response -type SubgraphSupplierItem = { - shares: string; - account: { - id: string; - }; -}; - -type SubgraphSuppliersResponse = { - data?: { - positions?: SubgraphSupplierItem[]; - }; -}; - -// In-memory cache for subgraph data (avoids refetching 1000 items on page change) -type CacheEntry = { - data: MarketSupplier[]; - timestamp: number; -}; - -const suppliersCache = new Map(); -const CACHE_TTL = 2 * 60 * 1000; // 2 minutes (same as React Query staleTime) - -function getCacheKey(marketId: string, network: SupportedNetworks, minShares: string): string { - return `${network}-${marketId}-${minShares}`; -} - -/** - * Fetches current market suppliers (positions) from the Subgraph. - * Uses adapter pattern: Always fetches top 1000 items and performs client-side pagination. - * This approach keeps the interface identical to Morpho API while working within subgraph limits. - * - * @param marketId The ID of the market (unique key). - * @param network The blockchain network. - * @param minShares Minimum share amount to filter suppliers (optional, defaults to '0'). 
- * @param pageSize Number of items to return per page (optional, defaults to 8). - * @param skip Number of items to skip for pagination (optional, defaults to 0). - * @returns A promise resolving to paginated MarketSupplier objects. - */ -export const fetchSubgraphMarketSuppliers = async ( - marketId: string, - network: SupportedNetworks, - minShares = '0', - pageSize = 8, - skip = 0, -): Promise => { - const subgraphUrl = getSubgraphUrl(network); - if (!subgraphUrl) { - console.warn(`No Subgraph URL configured for network: ${network}. Returning empty results.`); - return { items: [], totalCount: 0 }; - } - - const cacheKey = getCacheKey(marketId, network, minShares); - const now = Date.now(); - - // Check cache first - const cached = suppliersCache.get(cacheKey); - let allMappedItems: MarketSupplier[]; - - if (cached && now - cached.timestamp < CACHE_TTL) { - // Use cached data - allMappedItems = cached.data; - console.log(`Using cached suppliers data for ${marketId} (${allMappedItems.length} items)`); - } else { - // Fetch fresh data - always fetch top 1000 items (subgraph limit) - const variables = { - market: marketId, - minShares, - first: 1000, - skip: 0, - }; - - try { - const result = await subgraphGraphqlFetcher(subgraphUrl, marketSuppliersQuery, variables); - - const positions = result.data?.positions ?? 
[]; - - // Map all items to unified type - allMappedItems = positions.map((position) => ({ - userAddress: position.account.id, - supplyShares: position.shares, - })); - - // Update cache - suppliersCache.set(cacheKey, { - data: allMappedItems, - timestamp: now, - }); - - console.log(`Fetched and cached ${allMappedItems.length} suppliers for ${marketId}`); - } catch (error) { - console.error(`Error fetching or processing Subgraph market suppliers for ${marketId}:`, error); - if (error instanceof Error) { - throw error; - } - throw new Error('An unknown error occurred while fetching subgraph market suppliers'); - } - } - - // Perform client-side pagination by slicing the results - const start = skip; - const end = skip + pageSize; - const paginatedItems = allMappedItems.slice(start, end); - - // Return with actual total count (capped at 1000 by subgraph) - return { - items: paginatedItems, - totalCount: allMappedItems.length, - }; -}; diff --git a/src/data-sources/subgraph/market-supplies.ts b/src/data-sources/subgraph/market-supplies.ts deleted file mode 100644 index edb49b1a..00000000 --- a/src/data-sources/subgraph/market-supplies.ts +++ /dev/null @@ -1,105 +0,0 @@ -import { marketDepositsWithdrawsQuery } from '@/graphql/morpho-subgraph-queries'; -import type { SupportedNetworks } from '@/utils/networks'; -import { getSubgraphUrl } from '@/utils/subgraph-urls'; // Import shared utility -import type { MarketActivityTransaction, PaginatedMarketActivityTransactions } from '@/utils/types'; -import { subgraphGraphqlFetcher } from './fetchers'; // Import shared fetcher - -// Types specific to the Subgraph response for this query -type SubgraphSupplyWithdrawItem = { - amount: string; - account: { - id: string; - }; - timestamp: number | string; // Allow string timestamp from subgraph - hash: string; -}; - -type SubgraphSuppliesWithdrawsResponse = { - data?: { - deposits?: SubgraphSupplyWithdrawItem[]; - withdraws?: SubgraphSupplyWithdrawItem[]; - }; - // Error handling 
is now done by the fetcher -}; - -/** - * Fetches market supply/withdraw activities (deposits/withdraws of loan asset) from the Subgraph. - * NOTE: Because deposits and withdraws are fetched separately and merged client-side, - * we cannot do proper server-side pagination. Instead, we fetch a large batch (200 items) - * from both sources, merge and sort them, then apply client-side pagination. - * This ensures correct ordering and prevents skipped items. - * @param marketId The ID of the market. - * @param loanAssetId The address of the loan asset. - * @param network The blockchain network. - * @param minAssets Minimum asset amount to filter transactions (optional, defaults to 0). - * @param first Number of items to return per page (optional, defaults to 8). - * @param skip Number of items to skip for pagination (optional, defaults to 0). - * @returns A promise resolving to paginated MarketActivityTransaction objects. - */ -export const fetchSubgraphMarketSupplies = async ( - marketId: string, - loanAssetId: string, - network: SupportedNetworks, - minAssets = '0', - first = 8, - skip = 0, -): Promise => { - const subgraphUrl = getSubgraphUrl(network); - if (!subgraphUrl) { - console.warn(`No Subgraph URL configured for network: ${network}. Returning empty results.`); - return { items: [], totalCount: 0 }; - } - - const fetchBatchSize = 200; - - const variables = { - marketId, - loanAssetId, - minAssets, - first: fetchBatchSize, - skip: 0, - }; - - try { - const result = await subgraphGraphqlFetcher(subgraphUrl, marketDepositsWithdrawsQuery, variables); - - const deposits = result.data?.deposits ?? []; - const withdraws = result.data?.withdraws ?? []; - - const mappedDeposits: MarketActivityTransaction[] = deposits.map((d) => ({ - type: 'MarketSupply', - hash: d.hash, - timestamp: typeof d.timestamp === 'string' ? 
Number.parseInt(d.timestamp, 10) : d.timestamp, - amount: d.amount, - userAddress: d.account.id, - })); - - const mappedWithdraws: MarketActivityTransaction[] = withdraws.map((w) => ({ - type: 'MarketWithdraw', - hash: w.hash, - timestamp: typeof w.timestamp === 'string' ? Number.parseInt(w.timestamp, 10) : w.timestamp, - amount: w.amount, - userAddress: w.account.id, - })); - - // Merge and sort by timestamp, then apply client-side pagination - const combined = [...mappedDeposits, ...mappedWithdraws]; - combined.sort((a, b) => b.timestamp - a.timestamp); - - const startIndex = skip; - const endIndex = skip + first; - const items = combined.slice(startIndex, endIndex); - const totalCount = combined.length; - - return { - items, - totalCount, - }; - } catch (error) { - console.error(`Error fetching or processing Subgraph market supplies for ${marketId}:`, error); - if (error instanceof Error) { - throw error; - } - throw new Error('An unknown error occurred while fetching subgraph market supplies'); - } -}; diff --git a/src/data-sources/subgraph/market.ts b/src/data-sources/subgraph/market.ts deleted file mode 100644 index 3eba7efe..00000000 --- a/src/data-sources/subgraph/market.ts +++ /dev/null @@ -1,253 +0,0 @@ -import type { Address } from 'viem'; -import { marketQuery as subgraphMarketQuery, marketsQuery as subgraphMarketsQuery } from '@/graphql/morpho-subgraph-queries'; // Assuming query is here -import { formatBalance } from '@/utils/balance'; -import type { SupportedNetworks } from '@/utils/networks'; -import type { SubgraphMarket, SubgraphMarketQueryResponse, SubgraphMarketsQueryResponse, SubgraphToken } from '@/utils/subgraph-types'; -import { getSubgraphUrl } from '@/utils/subgraph-urls'; -import { isForceUnwhitelisted } from '@/utils/markets'; -import { blacklistTokens, findToken } from '@/utils/tokens'; -import type { Market, MarketUsdPriceSource, MarketWarning } from '@/utils/types'; -import { UNRECOGNIZED_COLLATERAL, UNRECOGNIZED_LOAN } from 
'@/utils/warnings'; -import { subgraphGraphqlFetcher } from './fetchers'; - -// Helper to safely parse BigDecimal/BigInt strings -const safeParseFloat = (value: string | null | undefined): number => { - if (value === null || value === undefined) return 0; - try { - return Number.parseFloat(value); - } catch { - return 0; - } -}; - -const safeParseInt = (value: string | null | undefined): number => { - if (value === null || value === undefined) return 0; - try { - return Number.parseInt(value, 10); - } catch { - return 0; - } -}; - -const transformSubgraphMarketToMarket = ( - subgraphMarket: Partial, - network: SupportedNetworks, -): Market => { - const marketId = subgraphMarket.id ?? ''; - const lltv = subgraphMarket.lltv ?? '0'; - const irmAddress = subgraphMarket.irm ?? '0x'; - const oracleAddress = (subgraphMarket.oracle?.oracleAddress ?? '0x') as Address; - - const totalSupplyShares = subgraphMarket.totalSupplyShares ?? '0'; - const totalBorrowShares = subgraphMarket.totalBorrowShares ?? '0'; - const fee = subgraphMarket.fee ?? '0'; - - const mapToken = (token: Partial | undefined) => ({ - id: token?.id ?? '0x', - address: token?.id ?? '0x', - symbol: token?.symbol ?? 'Unknown', - name: token?.name ?? 'Unknown Token', - decimals: token?.decimals ?? 18, - }); - - const loanAsset = mapToken(subgraphMarket.borrowedToken); - const collateralAsset = mapToken(subgraphMarket.inputToken); - - const chainId = network; - - const supplyAssets = subgraphMarket.totalSupply ?? subgraphMarket.inputTokenBalance ?? '0'; - const borrowAssets = subgraphMarket.totalBorrow ?? subgraphMarket.variableBorrowedTokenBalance ?? '0'; - const collateralAssets = subgraphMarket.totalCollateral ?? '0'; - - const timestamp = safeParseInt(subgraphMarket.lastUpdate); - - const totalSupplyNum = safeParseFloat(supplyAssets); - const totalBorrowNum = safeParseFloat(borrowAssets); - const utilization = totalSupplyNum > 0 ? 
totalBorrowNum / totalSupplyNum : 0; - - const supplyApy = Number(subgraphMarket.rates?.find((r) => r.side === 'LENDER')?.rate ?? 0); - const borrowApy = Number(subgraphMarket.rates?.find((r) => r.side === 'BORROWER')?.rate ?? 0); - - const warnings: MarketWarning[] = []; - - let loanAssetPrice = safeParseFloat(subgraphMarket.borrowedToken?.lastPriceUSD ?? '0'); - let collateralAssetPrice = safeParseFloat(subgraphMarket.inputToken?.lastPriceUSD ?? '0'); - const hasUSDPrice = loanAssetPrice > 0; - const usdPriceSource: MarketUsdPriceSource = hasUSDPrice ? 'direct' : 'none'; - - const knownLoadAsset = findToken(loanAsset.address, network); - const knownCollateralAsset = findToken(collateralAsset.address, network); - - if (!knownLoadAsset) { - warnings.push(UNRECOGNIZED_LOAN); - } - if (!knownCollateralAsset) { - warnings.push(UNRECOGNIZED_COLLATERAL); - } - - const supplyAssetsUsd = formatBalance(supplyAssets, loanAsset.decimals) * loanAssetPrice; - const borrowAssetsUsd = formatBalance(borrowAssets, loanAsset.decimals) * loanAssetPrice; - - const liquidityAssets = (BigInt(supplyAssets) - BigInt(borrowAssets)).toString(); - const liquidityAssetsUsd = formatBalance(liquidityAssets, loanAsset.decimals) * loanAssetPrice; - - const collateralAssetsUsd = formatBalance(collateralAssets, collateralAsset.decimals) * collateralAssetPrice; - - const marketDetail = { - id: marketId, - uniqueKey: marketId, - lltv, - irmAddress: irmAddress as Address, - whitelisted: !isForceUnwhitelisted(marketId), - loanAsset, - collateralAsset, - state: { - borrowAssets, - supplyAssets, - liquidityAssets, - collateralAssets, - borrowShares: totalBorrowShares, - supplyShares: totalSupplyShares, - borrowAssetsUsd, - supplyAssetsUsd, - liquidityAssetsUsd, - collateralAssetsUsd, - utilization, - supplyApy, - borrowApy, - fee: safeParseFloat(fee) / 10_000, - timestamp, - apyAtTarget: 0, - rateAtTarget: '0', - // Subgraph doesn't support daily/weekly/monthly APY - return null - dailySupplyApy: null, 
- dailyBorrowApy: null, - weeklySupplyApy: null, - weeklyBorrowApy: null, - monthlySupplyApy: null, - monthlyBorrowApy: null, - }, - oracleAddress, - morphoBlue: { - id: subgraphMarket.protocol?.id ?? '0x', - address: subgraphMarket.protocol?.id ?? '0x', - chain: { id: chainId }, - }, - warnings, - hasUSDPrice, - usdPriceSource, - realizedBadDebt: { underlying: '0' }, - supplyingVaults: [], - }; - - return marketDetail; -}; - -// Fetcher for market details from Subgraph -export const fetchSubgraphMarket = async (uniqueKey: string, network: SupportedNetworks): Promise => { - const subgraphApiUrl = getSubgraphUrl(network); - - if (!subgraphApiUrl) { - console.warn(`Subgraph URL for network ${network} is not defined. Cannot fetch market.`); - return null; - } - - try { - const response = await subgraphGraphqlFetcher(subgraphApiUrl, subgraphMarketQuery, { - id: uniqueKey.toLowerCase(), - }); - - // Handle cases where GraphQL errors resulted in missing data - const marketData = response?.data?.market; - - if (!marketData) { - console.warn(`Market with key ${uniqueKey} not found in Subgraph response.`); - return null; - } - - return transformSubgraphMarketToMarket(marketData, network); - } catch (error) { - console.error(`Error fetching subgraph market ${uniqueKey} on ${network}:`, error); - return null; - } -}; - -type SubgraphMarketsVariables = { - first: number; - skip: number; - where?: { - inputToken_not_in?: string[]; - }; - network?: string; -}; - -const SUBGRAPH_MARKETS_PAGE_SIZE = 1000; -const SUBGRAPH_MARKETS_PAGE_BATCH_SIZE = 4; - -const fetchSubgraphMarketsPage = async (subgraphApiUrl: string, network: SupportedNetworks, skip: number): Promise => { - const variables: SubgraphMarketsVariables = { - first: SUBGRAPH_MARKETS_PAGE_SIZE, - skip, - where: { - inputToken_not_in: [...blacklistTokens, '0x0000000000000000000000000000000000000000'], - }, - }; - - const response = await subgraphGraphqlFetcher( - subgraphApiUrl, - subgraphMarketsQuery, - variables as 
unknown as Record, - ); - - const marketsData = response?.data?.markets; - - if (!marketsData || !Array.isArray(marketsData)) { - throw new Error(`No markets found or invalid format in Subgraph response for network ${network} at skip ${skip}.`); - } - - return marketsData; -}; - -export const fetchSubgraphMarkets = async (network: SupportedNetworks): Promise => { - const subgraphApiUrl = getSubgraphUrl(network); - - if (!subgraphApiUrl) { - throw new Error(`Subgraph URL for network ${network} is not defined.`); - } - - const allMarkets: SubgraphMarket[] = []; - - const firstPage = await fetchSubgraphMarketsPage(subgraphApiUrl, network, 0); - allMarkets.push(...firstPage); - - if (firstPage.length === SUBGRAPH_MARKETS_PAGE_SIZE) { - let nextSkip = SUBGRAPH_MARKETS_PAGE_SIZE; - let hasMorePages = true; - - while (hasMorePages) { - const offsetBatch = Array.from( - { length: SUBGRAPH_MARKETS_PAGE_BATCH_SIZE }, - (_, index) => nextSkip + index * SUBGRAPH_MARKETS_PAGE_SIZE, - ); - const settledPages = await Promise.allSettled(offsetBatch.map((skip) => fetchSubgraphMarketsPage(subgraphApiUrl, network, skip))); - - hasMorePages = false; - - for (const settledPage of settledPages) { - if (settledPage.status === 'rejected') { - throw settledPage.reason; - } - - allMarkets.push(...settledPage.value); - - if (settledPage.value.length === SUBGRAPH_MARKETS_PAGE_SIZE) { - hasMorePages = true; - } - } - - nextSkip += SUBGRAPH_MARKETS_PAGE_BATCH_SIZE * SUBGRAPH_MARKETS_PAGE_SIZE; - } - } - - return allMarkets.map((market) => transformSubgraphMarketToMarket(market, network)); -}; diff --git a/src/data-sources/subgraph/positions.ts b/src/data-sources/subgraph/positions.ts deleted file mode 100644 index c913fc78..00000000 --- a/src/data-sources/subgraph/positions.ts +++ /dev/null @@ -1,202 +0,0 @@ -import { request } from 'graphql-request'; -import { fetchSubgraphMarket } from '@/data-sources/subgraph/market'; // Need market data too -import { subgraphUserPositionMarketsQuery } from 
'@/graphql/morpho-subgraph-queries'; -import { subgraphUserMarketPositionQuery } from '@/graphql/morpho-subgraph-queries'; -import type { SupportedNetworks } from '@/utils/networks'; -import { getSubgraphUrl } from '@/utils/subgraph-urls'; -import type { MarketPosition } from '@/utils/types'; - -// The type expected by MarketPosition.state -type MarketPositionState = { - supplyShares: string; - supplyAssets: string; - borrowShares: string; - borrowAssets: string; - collateral: string; // This is collateral assets -}; - -type SubgraphPositionMarketResponse = { - data?: { - account?: { - positions?: { - market: { - id: string; - }; - }[]; - }; - }; - errors?: { message: string }[]; -}; - -type SubgraphPosition = { - id: string; - asset: { - id: string; // Token address - }; - isCollateral: boolean | null; - balance: string; // BigInt string - side: 'SUPPLIER' | 'COLLATERAL' | 'BORROWER'; -}; - -type SubgraphPositionResponse = { - positions?: SubgraphPosition[]; -}; - -/** - * Fetches the unique keys of markets where a user has a position from the Subgraph. - */ -export const fetchSubgraphUserPositionMarkets = async ( - userAddress: string, - network: SupportedNetworks, -): Promise<{ marketUniqueKey: string; chainId: number }[]> => { - const endpoint = getSubgraphUrl(network); - if (!endpoint) { - console.warn(`No subgraph endpoint found for network ${network}`); - return []; - } - - try { - const response = await fetch(endpoint, { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ - query: subgraphUserPositionMarketsQuery, - variables: { - userId: userAddress.toLowerCase(), - }, - }), - }); - - const result = (await response.json()) as SubgraphPositionMarketResponse; - - if (result.errors) { - console.error(`Subgraph error fetching position markets for ${userAddress} on ${network}:`, result.errors); - throw new Error(result.errors.map((e) => e.message).join('; ')); - } - - const positions = result.data?.account?.positions ?? 
[]; - - return positions.map((pos) => ({ - marketUniqueKey: pos.market.id, - chainId: network, // The network ID is passed in - })); - } catch (error) { - console.error(`Failed to fetch position markets from subgraph for ${userAddress} on ${network}:`, error); - return []; // Return empty array on error - } -}; - -/** - * Fetches and reconstructs a user's position for a specific market from the Subgraph. - * Combines position data with market data. - */ -export const fetchSubgraphUserPositionForMarket = async ( - marketUniqueKey: string, - userAddress: string, - network: SupportedNetworks, -): Promise => { - const subgraphUrl = getSubgraphUrl(network); - if (!subgraphUrl) { - console.error(`Subgraph URL not configured for network ${network}.`); - return null; - } - - try { - // 1. Fetch the market details first (needed for context) - const market = await fetchSubgraphMarket(marketUniqueKey, network); - if (!market) { - console.warn(`Market ${marketUniqueKey} not found via subgraph on ${network} while fetching user position.`); - return null; // Cannot proceed without market details - } - - // 2. Fetch the user's positions within that market - const response = await request(subgraphUrl, subgraphUserMarketPositionQuery, { - marketId: marketUniqueKey.toLowerCase(), // Ensure lowercase for subgraph ID matching - userId: userAddress.toLowerCase(), - }); - - const positions = response.positions ?? []; - - // 3. 
Reconstruct the MarketPosition.state object - let supplyShares = '0'; - let supplyAssets = '0'; - let borrowShares = '0'; - let borrowAssets = '0'; - let collateralAssets = '0'; - - positions.forEach((pos) => { - const balanceStr = pos.balance; - if (!balanceStr || balanceStr === '0') return; // Ignore zero/empty balances - - switch (pos.side) { - case 'SUPPLIER': - // Assuming the SUPPLIER asset is always the loan asset - if (pos.asset.id.toLowerCase() === market.loanAsset.address.toLowerCase()) { - // Subgraph returns shares for SUPPLIER side in `balance` - supplyShares = balanceStr; - // We also need supplyAssets. Subgraph might not directly provide this for the position. - // We might need to calculate it using market.state conversion rates, or rely on fetchPositionSnapshot. - // For now, let's assume fetchPositionSnapshot is the primary source for accurate assets. - // If falling back here, we might lack the direct asset value from subgraph. - // Let's set assets based on shares * rate, IF market state has the rates. - // This requires market.state.supplyAssets and market.state.supplyShares - const marketTotalSupplyAssets = BigInt(market.state.supplyAssets || '0'); - const marketTotalSupplyShares = BigInt(market.state.supplyShares || '1'); // Avoid div by zero - supplyAssets = - marketTotalSupplyShares > 0n ? 
((BigInt(supplyShares) * marketTotalSupplyAssets) / marketTotalSupplyShares).toString() : '0'; - } else { - console.warn(`Subgraph position side 'SUPPLIER' doesn't match loan asset for market ${marketUniqueKey}`); - } - break; - case 'COLLATERAL': - // Assuming the COLLATERAL asset is always the collateral asset - if (pos.asset.id.toLowerCase() === market.collateralAsset.address.toLowerCase()) { - // Subgraph 'balance' for collateral IS THE ASSET AMOUNT - collateralAssets = balanceStr; - } else { - console.warn(`Subgraph position side 'COLLATERAL' doesn't match collateral asset for market ${marketUniqueKey}`); - } - break; - case 'BORROWER': - // Assuming the BORROWER asset is always the loan asset - if (pos.asset.id.toLowerCase() === market.loanAsset.address.toLowerCase()) { - // Subgraph returns shares for BORROWER side in `balance` - borrowShares = balanceStr; - // Calculate borrowAssets from shares - const marketTotalBorrowAssets = BigInt(market.state.borrowAssets || '0'); - const marketTotalBorrowShares = BigInt(market.state.borrowShares || '1'); // Avoid div by zero - borrowAssets = - marketTotalBorrowShares > 0n ? 
((BigInt(borrowShares) * marketTotalBorrowAssets) / marketTotalBorrowShares).toString() : '0'; - } else { - console.warn(`Subgraph position side 'BORROWER' doesn't match loan asset for market ${marketUniqueKey}`); - } - break; - default: - console.warn(`Unknown position side '${pos.side}' for market ${marketUniqueKey}`); - break; - } - }); - - // Check if the user has any position (check assets) - if (supplyAssets === '0' && collateralAssets === '0' && borrowAssets === '0') { - // If all balances are zero, treat as no position found for this market - return null; // Return null as per MarketPosition type possibility - } - - const state: MarketPositionState = { - supplyAssets: supplyAssets, - supplyShares: supplyShares, - collateral: collateralAssets, // Use the direct asset amount - borrowAssets: borrowAssets, - borrowShares: borrowShares, - }; - - return { - market, - state: state, - }; - } catch (error) { - console.error(`Failed to fetch user position for market ${marketUniqueKey} from Subgraph on ${network}:`, error); - return null; // Return null on error - } -}; diff --git a/src/data-sources/subgraph/transactions.ts b/src/data-sources/subgraph/transactions.ts deleted file mode 100644 index 5091ed43..00000000 --- a/src/data-sources/subgraph/transactions.ts +++ /dev/null @@ -1,217 +0,0 @@ -import { getSubgraphUserTransactionsQuery } from '@/graphql/morpho-subgraph-queries'; -import type { TransactionFilters, TransactionResponse } from '@/hooks/queries/fetchUserTransactions'; -import type { SupportedNetworks } from '@/utils/networks'; -import { getSubgraphUrl } from '@/utils/subgraph-urls'; -import { type UserTransaction, UserTxTypes } from '@/utils/types'; -import type { - SubgraphAccountData, - SubgraphBorrowTx, - SubgraphDepositTx, - SubgraphLiquidationTx, - SubgraphRepayTx, - SubgraphTransactionResponse, - SubgraphWithdrawTx, -} from './types'; - -const transformSubgraphTransactions = ( - subgraphData: SubgraphAccountData | null | undefined, - filters: 
TransactionFilters, -): TransactionResponse => { - const allTransactions: UserTransaction[] = []; - - // Handle null/undefined data gracefully - if (!subgraphData) { - return { - items: [], - pageInfo: { count: 0, countTotal: 0 }, - error: null, - }; - } - - subgraphData.deposits.forEach((tx: SubgraphDepositTx) => { - const type = tx.isCollateral ? UserTxTypes.MarketSupplyCollateral : UserTxTypes.MarketSupply; - allTransactions.push({ - hash: tx.hash, - timestamp: Number.parseInt(tx.timestamp, 10), - type: type, - data: { - __typename: type, - shares: tx.shares, - assets: tx.amount, - market: { - uniqueKey: tx.market.id, - }, - }, - }); - }); - - subgraphData.withdraws.forEach((tx: SubgraphWithdrawTx) => { - const type = tx.isCollateral ? UserTxTypes.MarketWithdrawCollateral : UserTxTypes.MarketWithdraw; - allTransactions.push({ - hash: tx.hash, - timestamp: Number.parseInt(tx.timestamp, 10), - type: type, - data: { - __typename: type, - shares: tx.shares, - assets: tx.amount, - market: { - uniqueKey: tx.market.id, - }, - }, - }); - }); - - subgraphData.borrows.forEach((tx: SubgraphBorrowTx) => { - allTransactions.push({ - hash: tx.hash, - timestamp: Number.parseInt(tx.timestamp, 10), - type: UserTxTypes.MarketBorrow, - data: { - __typename: UserTxTypes.MarketBorrow, - shares: tx.shares, - assets: tx.amount, - market: { - uniqueKey: tx.market.id, - }, - }, - }); - }); - - subgraphData.repays.forEach((tx: SubgraphRepayTx) => { - allTransactions.push({ - hash: tx.hash, - timestamp: Number.parseInt(tx.timestamp, 10), - type: UserTxTypes.MarketRepay, - data: { - __typename: UserTxTypes.MarketRepay, - shares: tx.shares, - assets: tx.amount, - market: { - uniqueKey: tx.market.id, - }, - }, - }); - }); - - subgraphData.liquidations.forEach((tx: SubgraphLiquidationTx) => { - allTransactions.push({ - hash: tx.hash, - timestamp: Number.parseInt(tx.timestamp, 10), - type: UserTxTypes.MarketLiquidation, - data: { - __typename: UserTxTypes.MarketLiquidation, - shares: '0', - 
assets: tx.repaid, - market: { - uniqueKey: tx.market.id, - }, - }, - }); - }); - - allTransactions.sort((a, b) => b.timestamp - a.timestamp); - - // No client-side filtering needed - filtering is done at GraphQL level via market_in - const count = allTransactions.length; - const countTotal = count; - - return { - items: allTransactions, - pageInfo: { - count: count, - countTotal: countTotal, - }, - error: null, - }; -}; - -export const fetchSubgraphTransactions = async (filters: TransactionFilters, network: SupportedNetworks): Promise => { - if (filters.userAddress.length !== 1) { - console.warn('Subgraph fetcher currently supports only one user address.'); - return { - items: [], - pageInfo: { count: 0, countTotal: 0 }, - error: null, - }; - } - - const subgraphUrl = getSubgraphUrl(network); - - if (!subgraphUrl) { - const errorMsg = `Subgraph URL not found for network ${network}. Check API key and configuration.`; - console.error(errorMsg); - return { - items: [], - pageInfo: { count: 0, countTotal: 0 }, - error: errorMsg, - }; - } - - const userAddress = filters.userAddress[0].toLowerCase(); - - // Always calculate current timestamp (seconds) - const currentTimestamp = Math.floor(Date.now() / 1000); - - // Construct variables with mandatory timestamp filters - const variables: Record = { - userId: userAddress, - first: filters.first ?? 1000, - skip: filters.skip ?? 
0, - timestamp_gt: 0, // Always start from time 0 - timestamp_lt: currentTimestamp, // Always end at current time - }; - - // Add market_in filter if marketUniqueKeys are provided - if (filters.marketUniqueKeys && filters.marketUniqueKeys.length > 0) { - // Convert market keys to lowercase for subgraph compatibility - variables.market_in = filters.marketUniqueKeys.map((key) => key.toLowerCase()); - } - - if (filters.timestampGte !== undefined && filters.timestampGte !== null) { - variables.timestamp_gte = filters.timestampGte; - } - if (filters.timestampLte !== undefined && filters.timestampLte !== null) { - variables.timestamp_lte = filters.timestampLte; - } - - const useMarketFilter = variables.market_in !== undefined; - - const requestBody = { - query: getSubgraphUserTransactionsQuery(useMarketFilter), - variables: variables, - }; - - try { - const response = await fetch(subgraphUrl, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify(requestBody), - }); - - const result = (await response.json()) as SubgraphTransactionResponse; - - if (result.errors) { - throw new Error(result.errors.map((e) => e.message).join(', ')); - } - - if (!result.data?.account) { - return { - items: [], - pageInfo: { count: 0, countTotal: 0 }, - error: null, - }; - } - - return transformSubgraphTransactions(result?.data?.account, filters); - } catch (err) { - console.error(`Error fetching Subgraph transactions from ${subgraphUrl}:`, err); - return { - items: [], - pageInfo: { count: 0, countTotal: 0 }, - error: err instanceof Error ? 
err.message : 'Unknown Subgraph error occurred', - }; - } -}; diff --git a/src/data-sources/subgraph/types.ts b/src/data-sources/subgraph/types.ts deleted file mode 100644 index 3b90d805..00000000 --- a/src/data-sources/subgraph/types.ts +++ /dev/null @@ -1,65 +0,0 @@ -import type { Address } from 'viem'; - -type SubgraphAsset = { - id: string; // Asset address - symbol?: string; // Optional symbol - decimals?: number; // Optional decimals -}; - -type SubgraphMarketReference = { - id: string; // Market unique key -}; - -type SubgraphAccountReference = { - id: Address; -}; - -type SubgraphBaseTx = { - id: string; // Transaction ID (e.g., hash + log index) - hash: string; // Transaction hash - timestamp: string; // Timestamp string (needs conversion to number) - market: SubgraphMarketReference; // Reference to the market - asset: SubgraphAsset; // Reference to the asset involved - amount: string; // Amount of the asset (loan/collateral) - shares: string; // Amount in shares - accountActor?: SubgraphAccountReference; // Optional: msg.sender for deposits etc. -}; - -export type SubgraphDepositTx = SubgraphBaseTx & { - isCollateral: boolean; // True for SupplyCollateral, False for Supply -}; - -export type SubgraphWithdrawTx = SubgraphBaseTx & { - isCollateral: boolean; // True for WithdrawCollateral, False for Withdraw -}; - -export type SubgraphBorrowTx = SubgraphBaseTx; - -export type SubgraphRepayTx = SubgraphBaseTx; - -export type SubgraphLiquidationTx = { - id: string; - hash: string; - timestamp: string; - market: SubgraphMarketReference; - liquidator: SubgraphAccountReference; // The account calling liquidate - amount: string; // Collateral seized amount (string) - repaid: string; // Debt repaid amount (string) -}; - -// Structure based on the example query { account(id: ...) { ... 
} } -export type SubgraphAccountData = { - deposits: SubgraphDepositTx[]; - withdraws: SubgraphWithdrawTx[]; - borrows: SubgraphBorrowTx[]; - repays: SubgraphRepayTx[]; - liquidations: SubgraphLiquidationTx[]; // Assuming liquidations where user was liquidated -}; - -// The full response structure from the subgraph query -export type SubgraphTransactionResponse = { - data: { - account: SubgraphAccountData | null; - }; - errors?: { message: string }[]; -}; diff --git a/src/data-sources/user-position.ts b/src/data-sources/user-position.ts index b62f891d..3bac352f 100644 --- a/src/data-sources/user-position.ts +++ b/src/data-sources/user-position.ts @@ -1,9 +1,8 @@ -import { hasEnvioIndexer, supportsMorphoApi } from '@/config/dataSources'; +import { hasEnvioIndexer } from '@/config/dataSources'; import { fetchEnvioUserPositionForMarket } from '@/data-sources/envio/positions'; import { fetchMarketDetails } from '@/data-sources/market-details'; import { fetchMorphoUserPositionForMarket } from '@/data-sources/morpho-api/positions'; import { getErrorMessage, logDataSourceEvent } from '@/data-sources/shared/source-debug'; -import { fetchSubgraphUserPositionForMarket } from '@/data-sources/subgraph/positions'; import type { CustomRpcUrls } from '@/stores/useCustomRpc'; import { getChainScopedMarketKey } from '@/utils/marketIdentity'; import type { SupportedNetworks } from '@/utils/networks'; @@ -101,51 +100,28 @@ export const fetchUserPositionForMarket = async ( } } - if (supportsMorphoApi(chainId)) { - try { - const morphoPosition = getValidatedPosition({ - chainId, - marketUniqueKey, - position: await fetchMorphoUserPositionForMarket(marketUniqueKey, userAddress, chainId), - source: 'Morpho API', - }); + try { + const morphoPosition = getValidatedPosition({ + chainId, + marketUniqueKey, + position: await fetchMorphoUserPositionForMarket(marketUniqueKey, userAddress, chainId), + source: 'Morpho API', + }); - if (morphoPosition) { - logDataSourceEvent('user-position', 
'using Morpho API fallback for position', { - chainId, - marketUniqueKey, - }); - return hydratePositionMarket(morphoPosition, chainId, options); - } - } catch (error) { - logDataSourceEvent('user-position', 'Morpho API position fetch failed, falling back to subgraph', { + if (morphoPosition) { + logDataSourceEvent('user-position', 'using Morpho API fallback for position', { chainId, marketUniqueKey, - reason: getErrorMessage(error), }); + return hydratePositionMarket(morphoPosition, chainId, options); } - } - - logDataSourceEvent('user-position', 'using subgraph fallback for position', { - chainId, - marketUniqueKey, - }); - let subgraphPosition: MarketPosition | null = null; - - try { - subgraphPosition = await fetchSubgraphUserPositionForMarket(marketUniqueKey, userAddress, chainId); } catch (error) { - logDataSourceEvent('user-position', 'subgraph position fallback failed', { + logDataSourceEvent('user-position', 'Morpho API position fetch failed', { chainId, marketUniqueKey, reason: getErrorMessage(error), - }); - return null; - } - - if (!subgraphPosition || !isMatchingPosition(subgraphPosition, marketUniqueKey, chainId)) { - return null; + }); } - return hydratePositionMarket(subgraphPosition, chainId, options); + return null; }; diff --git a/src/features/history/components/transaction-history-preview.tsx b/src/features/history/components/transaction-history-preview.tsx index 9abae815..22409d74 100644 --- a/src/features/history/components/transaction-history-preview.tsx +++ b/src/features/history/components/transaction-history-preview.tsx @@ -13,6 +13,7 @@ import { MarketIdentity, MarketIdentityMode } from '@/features/markets/component import { useProcessedMarkets } from '@/hooks/useProcessedMarkets'; import { useUserTransactionsQuery } from '@/hooks/queries/useUserTransactionsQuery'; import { formatReadable } from '@/utils/balance'; +import { getChainScopedMarketKey } from '@/utils/marketIdentity'; import { groupTransactionsByHash, getWithdrawals, 
getSupplies } from '@/utils/transactionGrouping'; import { getTruncatedAssetName } from '@/utils/oracle'; import type { Market } from '@/utils/types'; @@ -57,6 +58,14 @@ export function TransactionHistoryPreview({ return `/positions/${account}`; }, [account, viewAllHref]); + const findTransactionMarket = (marketUniqueKey: string, transactionChainId: number): Market | undefined => { + return allMarkets.find( + (market) => + getChainScopedMarketKey(market.uniqueKey, market.morphoBlue.chain.id) === + getChainScopedMarketKey(marketUniqueKey, transactionChainId), + ); + }; + const actions = ( ) : ( history.map((group) => { - const chainIdForTx = - chainId ?? allMarkets.find((m) => m.uniqueKey === group.transactions[0].data.market.uniqueKey)?.morphoBlue.chain.id; + const chainIdForTx = chainId ?? group.transactions[0]?.chainId; // Handle rebalances if (group.isMetaAction && group.metaActionType === 'rebalance') { @@ -139,10 +147,10 @@ export function TransactionHistoryPreview({ const firstWithdrawal = withdrawals[0]; const firstSupply = supplies[0]; const fromMarket = firstWithdrawal - ? (allMarkets.find((m) => m.uniqueKey === firstWithdrawal.data.market.uniqueKey) as Market | undefined) + ? findTransactionMarket(firstWithdrawal.data.market.uniqueKey, firstWithdrawal.chainId) : undefined; const toMarket = firstSupply - ? (allMarkets.find((m) => m.uniqueKey === firstSupply.data.market.uniqueKey) as Market | undefined) + ? findTransactionMarket(firstSupply.data.market.uniqueKey, firstSupply.chainId) : undefined; const loanAssetDecimals = fromMarket?.loanAsset.decimals ?? toMarket?.loanAsset.decimals ?? 18; const loanAssetSymbol = fromMarket?.loanAsset.symbol ?? toMarket?.loanAsset.symbol ?? 
''; @@ -215,8 +223,10 @@ export function TransactionHistoryPreview({ // Handle multiple deposits if (group.isMetaAction && group.metaActionType === 'deposits') { const firstTx = group.transactions[0]; - const market = allMarkets.find((m) => m.uniqueKey === firstTx.data.market.uniqueKey) as Market | undefined; - const marketCount = new Set(group.transactions.map((t) => t.data.market.uniqueKey)).size; + const market = findTransactionMarket(firstTx.data.market.uniqueKey, firstTx.chainId); + const marketCount = new Set( + group.transactions.map((transaction) => getChainScopedMarketKey(transaction.data.market.uniqueKey, transaction.chainId)), + ).size; const hasMoreMarkets = marketCount > 1; return ( @@ -268,8 +278,10 @@ export function TransactionHistoryPreview({ // Handle multiple withdrawals if (group.isMetaAction && group.metaActionType === 'withdrawals') { const firstTx = group.transactions[0]; - const market = allMarkets.find((m) => m.uniqueKey === firstTx.data.market.uniqueKey) as Market | undefined; - const marketCount = new Set(group.transactions.map((t) => t.data.market.uniqueKey)).size; + const market = findTransactionMarket(firstTx.data.market.uniqueKey, firstTx.chainId); + const marketCount = new Set( + group.transactions.map((transaction) => getChainScopedMarketKey(transaction.data.market.uniqueKey, transaction.chainId)), + ).size; const hasMoreMarkets = marketCount > 1; return ( @@ -319,7 +331,7 @@ export function TransactionHistoryPreview({ } const tx = group.transactions[0]; - const market = allMarkets.find((m) => m.uniqueKey === tx.data.market.uniqueKey) as Market | undefined; + const market = findTransactionMarket(tx.data.market.uniqueKey, tx.chainId); const sign = tx.type === 'MarketSupply' ? '+' : '-'; const side = tx.type === 'MarketSupply' ? 
'Supply' : 'Withdraw'; diff --git a/src/features/market-detail/components/borrows-table.tsx b/src/features/market-detail/components/borrows-table.tsx index 693697bd..55b857de 100644 --- a/src/features/market-detail/components/borrows-table.tsx +++ b/src/features/market-detail/components/borrows-table.tsx @@ -34,7 +34,7 @@ export function BorrowsTable({ chainId, market, minAssets, onOpenFiltersModal }: isLoading, isFetching, error, - } = useMarketBorrows(market?.uniqueKey, market.loanAsset.address, chainId, minAssets, currentPage, pageSize); + } = useMarketBorrows(market?.uniqueKey, chainId, minAssets, currentPage, pageSize); const borrows = paginatedData?.items ?? []; const totalCount = paginatedData?.totalCount ?? 0; diff --git a/src/features/market-detail/components/supplies-table.tsx b/src/features/market-detail/components/supplies-table.tsx index 2e47b9e6..dd39af2d 100644 --- a/src/features/market-detail/components/supplies-table.tsx +++ b/src/features/market-detail/components/supplies-table.tsx @@ -33,7 +33,7 @@ export function SuppliesTable({ chainId, market, minAssets, onOpenFiltersModal } data: paginatedData, isLoading, isFetching, - } = useMarketSupplies(market?.uniqueKey, market.loanAsset.address, chainId, minAssets, currentPage, pageSize); + } = useMarketSupplies(market?.uniqueKey, chainId, minAssets, currentPage, pageSize); const supplies = paginatedData?.items ?? []; const totalCount = paginatedData?.totalCount ?? 
0; diff --git a/src/features/position-detail/components/history-tab.tsx b/src/features/position-detail/components/history-tab.tsx index 75807c64..729b53da 100644 --- a/src/features/position-detail/components/history-tab.tsx +++ b/src/features/position-detail/components/history-tab.tsx @@ -25,6 +25,7 @@ import { useUserTransactionsQuery } from '@/hooks/queries/useUserTransactionsQue import { useDisclosure } from '@/hooks/useDisclosure'; import { useStyledToast } from '@/hooks/useStyledToast'; import { formatReadable } from '@/utils/balance'; +import { getChainScopedMarketKey } from '@/utils/marketIdentity'; import { UserTxTypes, type Market } from '@/utils/types'; import { actionTypeToText } from '@/utils/morpho'; import type { GroupedPosition, UserTransaction } from '@/utils/types'; @@ -296,7 +297,11 @@ export function HistoryTab({ groupedPosition, chainId, userAddress, transactions history.map((tx, index) => { if (!tx.data.market) return null; - const market = allMarkets.find((m) => m.uniqueKey === tx.data.market.uniqueKey) as Market | undefined; + const market = allMarkets.find( + (candidateMarket) => + getChainScopedMarketKey(candidateMarket.uniqueKey, candidateMarket.morphoBlue.chain.id) === + getChainScopedMarketKey(tx.data.market.uniqueKey, tx.chainId), + ) as Market | undefined; if (!market) return null; const isSupply = tx.type === UserTxTypes.MarketSupply; diff --git a/src/features/position-detail/position-view.tsx b/src/features/position-detail/position-view.tsx index 4b3afdc9..b7eedd35 100644 --- a/src/features/position-detail/position-view.tsx +++ b/src/features/position-detail/position-view.tsx @@ -10,6 +10,7 @@ import { useProcessedMarkets } from '@/hooks/useProcessedMarkets'; import { useTokensQuery } from '@/hooks/queries/useTokensQuery'; import { usePositionDetailPreferences } from '@/stores/usePositionDetailPreferences'; import { usePortfolioBookmarks } from '@/stores/usePortfolioBookmarks'; +import { getChainScopedMarketKey } from 
'@/utils/marketIdentity'; import { usePositionDetailData } from './hooks/usePositionDetailData'; import { PositionBreadcrumbs } from './components/position-breadcrumbs'; import { PositionHeader } from './components/position-header'; @@ -80,8 +81,10 @@ export default function PositionDetailContent({ chainId, loanAssetAddress, userA // Filter transactions relevant to this position's markets const relevantTransactions = useMemo(() => { if (!currentPosition) return []; - const marketKeys = new Set(currentPosition.markets.map((m) => m.market.uniqueKey.toLowerCase())); - return transactions.filter((tx) => tx.data.market && marketKeys.has(tx.data.market.uniqueKey.toLowerCase())); + const marketKeys = new Set( + currentPosition.markets.map((position) => getChainScopedMarketKey(position.market.uniqueKey, position.market.morphoBlue.chain.id)), + ); + return transactions.filter((tx) => tx.data.market && marketKeys.has(getChainScopedMarketKey(tx.data.market.uniqueKey, tx.chainId))); }, [transactions, currentPosition]); // Handle refetch diff --git a/src/graphql/morpho-api-queries.ts b/src/graphql/morpho-api-queries.ts index 199c08cd..40477f23 100644 --- a/src/graphql/morpho-api-queries.ts +++ b/src/graphql/morpho-api-queries.ts @@ -147,6 +147,52 @@ export const marketsFragment = ` } `; +const commonMarketMetadataFields = ` +uniqueKey +morphoBlue { + chain { + id + } +} +warnings { + type + level + __typename +} +supplyingVaults { + address +} +`; + +export const marketMetadataFragment = ` + fragment MarketMetadataFields on Market { + ${commonMarketMetadataFields} + } +`; + +export const marketsMetadataQuery = ` + query getMarketsMetadata($first: Int, $skip: Int, $where: MarketFilters) { + markets(first: $first, skip: $skip, where: $where) { + items { + ...MarketMetadataFields + } + pageInfo { + countTotal + } + } + } + ${marketMetadataFragment} +`; + +export const marketMetadataDetailQuery = ` + query getMarketMetadata($uniqueKey: String!, $chainId: Int) { + 
marketByUniqueKey(uniqueKey: $uniqueKey, chainId: $chainId) { + ...MarketMetadataFields + } + } + ${marketMetadataFragment} +`; + export const marketsQuery = ` query getMarkets($first: Int, $skip: Int, $where: MarketFilters) { markets(first: $first, skip: $skip, where: $where) { @@ -353,6 +399,11 @@ export const userTransactionsQuery = ` assets market { uniqueKey + morphoBlue { + chain { + id + } + } } } } diff --git a/src/graphql/morpho-subgraph-queries.ts b/src/graphql/morpho-subgraph-queries.ts deleted file mode 100644 index c9697de9..00000000 --- a/src/graphql/morpho-subgraph-queries.ts +++ /dev/null @@ -1,492 +0,0 @@ -export const tokenFragment = ` - fragment TokenFields on Token { - id - name - symbol - decimals - lastPriceUSD - } -`; - -export const oracleFragment = ` - fragment OracleFields on Oracle { - id - oracleAddress - oracleSource - isActive - isUSD - } -`; - -export const marketFragment = ` - fragment SubgraphMarketFields on Market { - id - lltv - irm - inputToken { # collateralAsset - ...TokenFields - } - inputTokenPriceUSD - borrowedToken { # loanAsset - ...TokenFields - } - totalDepositBalanceUSD - totalBorrowBalanceUSD - totalSupplyShares - totalBorrowShares - totalSupply - totalBorrow - totalCollateral - fee - - name - isActive - canBorrowFrom - canUseAsCollateral - maximumLTV - liquidationThreshold - liquidationPenalty - createdTimestamp - createdBlockNumber - inputTokenBalance - variableBorrowedTokenBalance - totalValueLockedUSD - lastUpdate - reserves - reserveFactor - oracle { - ...OracleFields - } - rates { - id - rate # APY - side - type - } - protocol { - id - network # Chain Name - protocol # Protocol Name - } - } - ${tokenFragment} - ${oracleFragment} -`; - -export const marketsQuery = ` - query getSubgraphMarkets($first: Int, $skip: Int, $where: Market_filter, $network: String) { - markets( - first: $first, - skip: $skip, - where: $where, - orderBy: totalValueLockedUSD, - orderDirection: desc, - ) { - ...SubgraphMarketFields - } - } - 
${marketFragment} -`; - -// Add other queries as needed, e.g., for user positions based on subgraph schema - -export const marketQuery = ` - query getSubgraphMarket($id: Bytes!) { - market(id: $id) { - ...SubgraphMarketFields - } - } - ${marketFragment} -`; - -// --- Added for Historical Data --- - -export const marketHourlySnapshotFragment = ` - fragment MarketHourlySnapshotFields on MarketHourlySnapshot { - id - timestamp - market { - id - inputToken { - ...TokenFields - } - borrowedToken { - ...TokenFields - } - } - rates { - id - rate # APY - side - type - } - totalDepositBalanceUSD - totalBorrowBalanceUSD - inputTokenBalance - inputTokenPriceUSD - hourlyDepositUSD - hourlyBorrowUSD - outputTokenSupply - variableBorrowedTokenBalance - # Note: The subgraph schema for snapshots doesn't seem to directly expose - # total native supply/borrow amounts historically, only USD values and hourly deltas. - } -`; - -export const marketHourlySnapshotsQuery = ` - query getMarketHourlySnapshots($marketId: Bytes!, $startTimestamp: BigInt!, $endTimestamp: BigInt!) { - marketHourlySnapshots( - first: 1000, # Subgraph max limit - orderBy: timestamp, - orderDirection: asc, - where: { - market: $marketId, - timestamp_gte: $startTimestamp, - timestamp_lte: $endTimestamp - } - ) { - ...MarketHourlySnapshotFields - } - } - ${marketHourlySnapshotFragment} - ${tokenFragment} # Ensure TokenFields fragment is included -`; -// --- End Added Section --- - -// --- Query for Market Supplies/Withdraws (Deposits/Withdraws of Loan Asset) --- -export const marketDepositsWithdrawsQuery = ` - query getMarketDepositsWithdraws($marketId: Bytes!, $loanAssetId: Bytes!, $minAssets: String, $first: Int!, $skip: Int!) 
{ - deposits( - first: $first, - skip: $skip, - orderBy: timestamp, - orderDirection: desc, - where: { market: $marketId, asset: $loanAssetId, amount_gt: $minAssets } - ) { - amount - account { id } - timestamp - hash - } - withdraws( - first: $first, - skip: $skip, - orderBy: timestamp, - orderDirection: desc, - where: { market: $marketId, asset: $loanAssetId, amount_gt: $minAssets } - ) { - amount - account { id } - timestamp - hash - } - } -`; -// --- End Query --- - -// --- Query for Market Borrows/Repays (Borrows/Repays of Loan Asset) --- -export const marketBorrowsRepaysQuery = ` - query getMarketBorrowsRepays($marketId: Bytes!, $loanAssetId: Bytes!, $minAssets: BigInt, $first: Int!, $skip: Int!) { - borrows( - first: $first, - skip: $skip, - orderBy: timestamp, - orderDirection: desc, - where: { market: $marketId, asset: $loanAssetId, amount_gt: $minAssets } - ) { - amount - account { id } - timestamp - hash - } - repays( - first: $first, - skip: $skip, - orderBy: timestamp, - orderDirection: desc, - where: { market: $marketId, asset: $loanAssetId, amount_gt: $minAssets } - ) { - amount - account { id } - timestamp - hash - } - } -`; -// --- End Query --- - -// --- Query for Market Liquidations and Bad Debt --- -export const marketLiquidationsAndBadDebtQuery = ` - query getMarketLiquidations($marketId: Bytes!) 
{ - liquidates( - first: 1000, - where: { market: $marketId }, - orderBy: timestamp, - orderDirection: desc - ) { - id - hash - timestamp - repaid - amount - liquidator { id } - } - badDebtRealizations( - first: 1000, - where: { market: $marketId } - ) { - badDebt - liquidation { id } - } - } -`; -// --- End Query --- - -// --- Query to check which markets have had at least one liquidation --- -export const subgraphMarketsWithLiquidationCheckQuery = ` - query getSubgraphMarketsWithLiquidationCheck( - $first: Int, - $where: Market_filter, - ) { - markets( - first: $first, - where: $where, - orderBy: totalValueLockedUSD, - orderDirection: desc, - ) { - id # Market ID (uniqueKey) - liquidates(first: 1) { # Fetch only one to check existence - id - } - } - } -`; - -// --- Query for User Position Market IDs --- -export const subgraphUserPositionMarketsQuery = ` - query GetUserPositionMarkets($userId: ID!) { - account(id: $userId) { - positions(first: 1000) { # Assuming a user won't have > 1000 positions - market { - id # Market unique key - } - } - } - } -`; -// --- End Query --- - -// --- Query for User Position in a Single Market --- -export const subgraphUserMarketPositionQuery = ` - query GetUserMarketPosition($marketId: ID!, $userId: ID!) { - positions( - where: { market: $marketId, account: $userId } - ) { - id - asset { - id # Token address - } - isCollateral - balance - side # SUPPLIER, BORROWER, COLLATERAL - } - } -`; -// --- End Query --- - -export const getSubgraphUserTransactionsQuery = (useMarketFilter: boolean) => { - // only append this in where if marketIn is defined - const additionalQuery = useMarketFilter ? 'market_in: $market_in' : ''; - - return ` - query GetUserTransactions( - $userId: ID! - $first: Int! - $skip: Int! - $timestamp_gt: BigInt! - $timestamp_lt: BigInt! - ${useMarketFilter ? 
'$market_in: [Bytes!]}' : ''} - ) { - account(id: $userId) { - deposits( - first: $first - skip: $skip - orderBy: timestamp - orderDirection: desc - where: { - timestamp_gt: $timestamp_gt - timestamp_lt: $timestamp_lt - ${additionalQuery} - } - ) { - id - hash - timestamp - isCollateral - market { id } - asset { id } - amount - shares - accountActor { id } - } - withdraws( - first: $first - skip: $skip - orderBy: timestamp - orderDirection: desc - where: { - timestamp_gt: $timestamp_gt - timestamp_lt: $timestamp_lt - ${additionalQuery} - } - ) { - id - hash - timestamp - isCollateral - market { id } - asset { id } - amount - shares - accountActor { id } - } - borrows( - first: $first - skip: $skip - orderBy: timestamp - orderDirection: desc - where: { - timestamp_gt: $timestamp_gt - timestamp_lt: $timestamp_lt - ${additionalQuery} - } - ) { - id - hash - timestamp - market { id } - asset { id } - amount - shares - accountActor { id } - } - repays( - first: $first - skip: $skip - orderBy: timestamp - orderDirection: desc - where: { - timestamp_gt: $timestamp_gt - timestamp_lt: $timestamp_lt - ${additionalQuery} - } - ) { - id - hash - timestamp - market { id } - asset { id } - amount - shares - accountActor { id } - } - liquidations( - first: $first - skip: $skip - orderBy: timestamp - orderDirection: desc - where: { - timestamp_gt: $timestamp_gt - timestamp_lt: $timestamp_lt - ${additionalQuery} - } - ) { - id - hash - timestamp - market { id } - liquidator { id } - amount # Collateral seized - repaid # Debt repaid - } - } - } -`; -}; - -export const marketPositionsQuery = ` - query getMarketPositions($market: String!, $minShares: BigInt!, $first: Int!, $skip: Int!) 
{ - positions( - where: { - shares_gt: $minShares - market: $market - } - orderBy: shares - orderDirection: desc - first: $first - skip: $skip - ) { - shares - account { - id - } - } - } -`; - -// Query for market suppliers (positions with side: SUPPLIER, isCollateral: false) -export const marketSuppliersQuery = ` - query getMarketSuppliers($market: String!, $minShares: BigInt!, $first: Int!, $skip: Int!) { - positions( - where: { - shares_gt: $minShares - side: SUPPLIER - isCollateral: false - market: $market - } - orderBy: shares - orderDirection: desc - first: $first - skip: $skip - ) { - shares - account { - id - } - } - } -`; - -// Query for market borrowers (positions with side: BORROWER) including collateral and market totals for conversion -export const marketBorrowersQuery = ` - query getMarketBorrowers($market: String!, $minShares: BigInt!, $first: Int!, $skip: Int!) { - market(id: $market) { - totalBorrow - totalBorrowShares - } - positions( - where: { - shares_gt: $minShares - side: BORROWER - market: $market - } - orderBy: shares - orderDirection: desc - first: $first - skip: $skip - ) { - shares - account { - id - positions( - where: { - side: COLLATERAL - market: $market - } - ) { - balance - } - } - } - } -`; diff --git a/src/hooks/queries/fetchUserTransactions.ts b/src/hooks/queries/fetchUserTransactions.ts index 3c7e84b0..52e98da4 100644 --- a/src/hooks/queries/fetchUserTransactions.ts +++ b/src/hooks/queries/fetchUserTransactions.ts @@ -1,17 +1,16 @@ -import { hasEnvioIndexer, supportsMorphoApi } from '@/config/dataSources'; +import { hasEnvioIndexer } from '@/config/dataSources'; import { fetchEnvioTransactions } from '@/data-sources/envio/transactions'; import { fetchMorphoTransactions } from '@/data-sources/morpho-api/transactions'; -import { fetchSubgraphTransactions } from '@/data-sources/subgraph/transactions'; import { isSupportedChain } from '@/utils/networks'; import type { UserTransaction } from '@/utils/types'; /** * Filters for 
fetching user transactions. - * Requires a single chainId - for multi-chain queries, use useUserTransactionsQuery with paginate: true. */ export type TransactionFilters = { userAddress: string[]; - chainId: number; + chainId?: number; + chainIds?: number[]; marketUniqueKeys?: string[]; timestampGte?: number; timestampLte?: number; @@ -30,72 +29,98 @@ export type TransactionResponse = { error: string | null; }; +const resolveTransactionChainIds = (filters: TransactionFilters): number[] => { + const chainIds = filters.chainIds ?? (filters.chainId != null ? [filters.chainId] : []); + return [...new Set(chainIds)]; +}; + /** - * Fetches user transactions for a SINGLE chain from Morpho API or Subgraph. - * For multi-chain queries, use useUserTransactionsQuery with paginate: true. + * Fetches user transactions for one or more chains using the configured indexer source. * - * @param filters - Transaction filters (chainId is required) + * @param filters - Transaction filters * @returns Promise resolving to transaction response */ export async function fetchUserTransactions(filters: TransactionFilters): Promise { - const { chainId } = filters; + const chainIds = resolveTransactionChainIds(filters); - // Validate chainId - if (!isSupportedChain(chainId)) { - console.warn(`Unsupported chain: ${chainId}`); + if (chainIds.length === 0) { return { items: [], pageInfo: { count: 0, countTotal: 0 }, - error: `Unsupported chain: ${chainId}`, + error: 'At least one chainId is required', }; } - // Check subgraph user address limitation - if (!hasEnvioIndexer() && !supportsMorphoApi(chainId) && filters.userAddress.length !== 1) { - const errorMsg = 'Subgraph data source requires exactly one user address.'; - console.error(errorMsg); + const unsupportedChainId = chainIds.find((chainId) => !isSupportedChain(chainId)); + if (unsupportedChainId != null) { + console.warn(`Unsupported chain: ${unsupportedChainId}`); return { items: [], pageInfo: { count: 0, countTotal: 0 }, - error: 
errorMsg, + error: `Unsupported chain: ${unsupportedChainId}`, }; } + const normalizedFilters: TransactionFilters = { + ...filters, + chainIds, + chainId: chainIds.length === 1 ? chainIds[0] : undefined, + }; + if (hasEnvioIndexer()) { try { - const response = await fetchEnvioTransactions(filters); + const response = await fetchEnvioTransactions(normalizedFilters); if (!response.error) { return response; } } catch (envioError) { - console.warn(`Envio failed for chain ${chainId}, falling back to legacy sources:`, envioError); + console.warn(`Envio failed for chains ${chainIds.join(',')}, falling back to Morpho API:`, envioError); } } - // Try Morpho API next if supported - if (supportsMorphoApi(chainId)) { - try { - const response = await fetchMorphoTransactions(filters); - if (!response.error) { - return response; - } - // Morpho API returned an error, fall through to Subgraph - } catch (morphoError) { - console.warn(`Morpho API failed for chain ${chainId}, falling back to Subgraph:`, morphoError); - // Fall through to Subgraph + return fetchMorphoTransactions(normalizedFilters); +} + +export async function fetchAllUserTransactions( + filters: Omit & { + chainId?: number; + chainIds?: number[]; + }, + options: { + pageSize?: number; + maxPages?: number; + } = {}, +): Promise { + const pageSize = options.pageSize ?? 1000; + const maxPages = options.maxPages ?? 50; + const items: UserTransaction[] = []; + + for (let page = 0; page < maxPages; page += 1) { + const response = await fetchUserTransactions({ + ...filters, + first: pageSize, + skip: page * pageSize, + }); + + if (response.error) { + return response; } - } - // Final fallback to Subgraph - try { - return await fetchSubgraphTransactions(filters, chainId); - } catch (subgraphError) { - const errorMsg = `Failed to fetch transactions: ${(subgraphError as Error)?.message ?? 
'Unknown error'}`; - console.error(errorMsg); - return { - items: [], - pageInfo: { count: 0, countTotal: 0 }, - error: errorMsg, - }; + items.push(...response.items); + + if (response.items.length < pageSize) { + break; + } } + + items.sort((left, right) => right.timestamp - left.timestamp); + + return { + items, + pageInfo: { + count: items.length, + countTotal: items.length, + }, + error: null, + }; } diff --git a/src/hooks/queries/useMarketMetadataQuery.ts b/src/hooks/queries/useMarketMetadataQuery.ts new file mode 100644 index 00000000..f4b68266 --- /dev/null +++ b/src/hooks/queries/useMarketMetadataQuery.ts @@ -0,0 +1,22 @@ +import { useQuery } from '@tanstack/react-query'; +import { fetchMorphoMarketMetadata } from '@/data-sources/morpho-api/market-metadata'; +import type { SupportedNetworks } from '@/utils/networks'; +import type { MarketMetadata } from '@/utils/types'; + +export const useMarketMetadataQuery = (uniqueKey: string | undefined, chainId: SupportedNetworks | undefined) => { + return useQuery({ + queryKey: ['market-metadata', uniqueKey, chainId], + queryFn: async () => { + if (!uniqueKey || !chainId) { + return null; + } + + return fetchMorphoMarketMetadata(uniqueKey, chainId); + }, + enabled: !!uniqueKey && !!chainId, + staleTime: 5 * 60 * 1000, + refetchInterval: 5 * 60 * 1000, + refetchOnWindowFocus: true, + placeholderData: (previousData) => previousData ?? 
null, + }); +}; diff --git a/src/hooks/queries/useMarketsMetadataQuery.ts b/src/hooks/queries/useMarketsMetadataQuery.ts new file mode 100644 index 00000000..858c6e1e --- /dev/null +++ b/src/hooks/queries/useMarketsMetadataQuery.ts @@ -0,0 +1,17 @@ +import { useQuery } from '@tanstack/react-query'; +import { fetchMorphoMarketsMetadataMultiChain } from '@/data-sources/morpho-api/market-metadata'; +import { ALL_SUPPORTED_NETWORKS } from '@/utils/networks'; +import type { MarketMetadata } from '@/utils/types'; + +const EMPTY_MARKET_METADATA_MAP = new Map(); + +export const useMarketsMetadataQuery = () => { + return useQuery({ + queryKey: ['markets-metadata'], + queryFn: () => fetchMorphoMarketsMetadataMultiChain(ALL_SUPPORTED_NETWORKS), + staleTime: 5 * 60 * 1000, + refetchInterval: 5 * 60 * 1000, + refetchOnWindowFocus: true, + placeholderData: () => EMPTY_MARKET_METADATA_MAP, + }); +}; diff --git a/src/hooks/queries/useMarketsQuery.ts b/src/hooks/queries/useMarketsQuery.ts index b761c5f2..50c144bd 100644 --- a/src/hooks/queries/useMarketsQuery.ts +++ b/src/hooks/queries/useMarketsQuery.ts @@ -8,9 +8,9 @@ import type { Market } from '@/utils/types'; * Fetches markets from all supported networks using React Query. 
* * Data fetching strategy: - * - Uses the shared market catalog adapter to fetch all supported chains in one go - * - Merges Morpho API metadata with Envio state when Envio is configured - * - Falls back to per-network Morpho/Subgraph fetching only if the cross-chain path fails + * - Uses the shared indexed market catalog adapter to fetch all supported chains in one go + * - Uses Envio as the primary indexed source when configured + * - Falls back to Morpho API only if Envio is unavailable or returns no usable markets * - Applies basic filtering (required fields, supported chains) * * Cache behavior: diff --git a/src/hooks/queries/useUserTransactionsQuery.ts b/src/hooks/queries/useUserTransactionsQuery.ts index 19e3bbdc..138ecd97 100644 --- a/src/hooks/queries/useUserTransactionsQuery.ts +++ b/src/hooks/queries/useUserTransactionsQuery.ts @@ -1,11 +1,10 @@ import { useQuery } from '@tanstack/react-query'; -import { fetchUserTransactions, type TransactionFilters, type TransactionResponse } from './fetchUserTransactions'; +import { fetchAllUserTransactions, fetchUserTransactions, type TransactionFilters, type TransactionResponse } from './fetchUserTransactions'; import { ALL_SUPPORTED_NETWORKS } from '@/utils/networks'; -import type { UserTransaction } from '@/utils/types'; /** * Filter options for the hook. - * - For non-paginated queries: `chainId` is required (single chain) + * - For non-paginated queries: `chainId` is typically used for a single chain view * - For paginated queries: `chainIds` can be used for multi-chain, or `chainId` for single chain */ type HookTransactionFilters = Omit & { @@ -19,7 +18,6 @@ type UseUserTransactionsQueryOptions = { /** * When true, automatically paginates to fetch ALL transactions. * Required when using chainIds with multiple values. - * For multi-chain queries, fetches all chains in parallel. 
*/ paginate?: boolean; /** Page size for pagination (default 1000) */ @@ -27,13 +25,13 @@ type UseUserTransactionsQueryOptions = { }; /** - * Fetches user transactions from Morpho API or Subgraph using React Query. + * Fetches user transactions from the shared indexed history adapter using React Query. * * Data fetching strategy: - * - For non-paginated queries: requires single chainId, fetches with skip/first - * - For paginated queries: can use multiple chainIds, fetches ALL data in parallel - * - Tries Morpho API first (if supported for the network) - * - Falls back to Subgraph if API fails or not supported + * - For non-paginated queries: fetches with skip/first for the requested chain scope + * - For paginated queries: fetches ALL data through the shared adapter pagination loop + * - Tries Envio first when configured + * - Falls back to Morpho API */ export const useUserTransactionsQuery = (options: UseUserTransactionsQueryOptions) => { const { filters, enabled = true, paginate = false, pageSize = 1000 } = options; @@ -55,66 +53,28 @@ export const useUserTransactionsQuery = (options: UseUserTransactionsQueryOption pageSize, ], queryFn: async () => { - if (paginate) { - // Paginate mode: fetch ALL transactions, supports multi-chain - const chainIds = filters.chainIds ?? (filters.chainId ? 
[filters.chainId] : ALL_SUPPORTED_NETWORKS); - - // Helper to fetch all pages for one chain - const fetchAllForChain = async (chainId: number): Promise => { - const items: UserTransaction[] = []; - let skip = 0; - let hasMore = true; - - while (hasMore) { - const response = await fetchUserTransactions({ - ...filters, - chainId, - first: pageSize, - skip, - }); - - items.push(...response.items); - skip += response.items.length; - - // Stop if we got fewer items than requested (last page) - hasMore = response.items.length >= pageSize; - - // Safety: max 50 pages per chain to prevent infinite loops - if (skip >= 50 * pageSize) { - console.warn(`Transaction pagination limit reached for chain ${chainId} (50 pages)`); - break; - } - } + const chainIds = filters.chainIds ?? (filters.chainId ? [filters.chainId] : ALL_SUPPORTED_NETWORKS); - return items; - }; - - // Fetch ALL chains IN PARALLEL - const results = await Promise.all(chainIds.map(fetchAllForChain)); - const allItems = results.flat(); - - // Sort combined results by timestamp (descending) - allItems.sort((a, b) => b.timestamp - a.timestamp); - - return { - items: allItems, - pageInfo: { - count: allItems.length, - countTotal: allItems.length, + if (paginate) { + return fetchAllUserTransactions( + { + ...filters, + chainIds, + }, + { + pageSize, }, - error: null, - }; + ); } - // Non-paginate mode: requires single chainId - if (!filters.chainId) { - throw new Error('chainId is required for non-paginated queries. Use paginate: true for multi-chain queries.'); + if (chainIds.length === 0) { + throw new Error('At least one chainId is required.'); } - // Simple case: fetch once with limit return await fetchUserTransactions({ ...filters, - chainId: filters.chainId, + chainIds, + chainId: chainIds.length === 1 ? chainIds[0] : undefined, first: filters.first ?? 
pageSize, }); }, diff --git a/src/hooks/useMarketBorrows.ts b/src/hooks/useMarketBorrows.ts index 9a08e33f..7f6825d3 100644 --- a/src/hooks/useMarketBorrows.ts +++ b/src/hooks/useMarketBorrows.ts @@ -5,11 +5,8 @@ import type { SupportedNetworks } from '@/utils/networks'; import type { PaginatedMarketActivityTransactions } from '@/utils/types'; /** - * Hook to fetch borrow and repay activities for a specific market's loan asset, - * using the appropriate data source based on the network. - * Supports pagination with server-side pagination for Morpho API and client-side for Subgraph. + * Hook to fetch borrow and repay activities for a specific market. * @param marketId The ID or unique key of the market. - * @param loanAssetId The address of the loan asset for the market. * @param network The blockchain network. * @param minAssets Minimum asset amount to filter transactions (optional, defaults to '0'). * @param page Current page number (1-indexed, defaults to 1). @@ -18,7 +15,6 @@ import type { PaginatedMarketActivityTransactions } from '@/utils/types'; */ export const useMarketBorrows = ( marketId: string | undefined, - loanAssetId: string | undefined, network: SupportedNetworks | undefined, minAssets = '0', page = 1, @@ -26,24 +22,24 @@ export const useMarketBorrows = ( ) => { const queryClient = useQueryClient(); - const queryKey = ['marketBorrows', marketId, loanAssetId, network, minAssets, page, pageSize]; + const queryKey = ['marketBorrows', marketId, network, minAssets, page, pageSize]; const queryFn = useCallback( async (targetPage: number): Promise => { - if (!marketId || !loanAssetId || !network) { + if (!marketId || !network) { return null; } const targetSkip = (targetPage - 1) * pageSize; - return fetchMarketBorrows(marketId, loanAssetId, network, minAssets, pageSize, targetSkip); + return fetchMarketBorrows(marketId, network, minAssets, pageSize, targetSkip); }, - [marketId, loanAssetId, network, minAssets, pageSize], + [marketId, network, minAssets, 
pageSize], ); const { data, isLoading, isFetching, error, refetch } = useQuery({ queryKey: queryKey, queryFn: async () => queryFn(page), - enabled: !!marketId && !!loanAssetId && !!network, + enabled: !!marketId && !!network, staleTime: 1000 * 60 * 5, // 5 minutes - keep cached data fresh longer placeholderData: (previousData) => previousData ?? null, retry: 1, @@ -51,12 +47,12 @@ export const useMarketBorrows = ( // Prefetch adjacent pages for faster navigation useEffect(() => { - if (!marketId || !loanAssetId || !network || !data) return; + if (!marketId || !network || !data) return; const totalPages = data.totalCount > 0 ? Math.ceil(data.totalCount / pageSize) : 0; if (page > 1) { - const prevPageKey = ['marketBorrows', marketId, loanAssetId, network, minAssets, page - 1, pageSize]; + const prevPageKey = ['marketBorrows', marketId, network, minAssets, page - 1, pageSize]; void queryClient.prefetchQuery({ queryKey: prevPageKey, queryFn: async () => queryFn(page - 1), @@ -65,7 +61,7 @@ export const useMarketBorrows = ( } if (page < totalPages) { - const nextPageKey = ['marketBorrows', marketId, loanAssetId, network, minAssets, page + 1, pageSize]; + const nextPageKey = ['marketBorrows', marketId, network, minAssets, page + 1, pageSize]; void queryClient.prefetchQuery({ queryKey: nextPageKey, queryFn: async () => queryFn(page + 1), diff --git a/src/hooks/useMarketData.ts b/src/hooks/useMarketData.ts index 3a75d2d9..71d98d8c 100644 --- a/src/hooks/useMarketData.ts +++ b/src/hooks/useMarketData.ts @@ -1,6 +1,8 @@ import { useMemo } from 'react'; import { useQuery } from '@tanstack/react-query'; import { fetchMarketDetails } from '@/data-sources/market-details'; +import { applyMarketMetadata } from '@/data-sources/shared/market-metadata'; +import { useMarketMetadataQuery } from '@/hooks/queries/useMarketMetadataQuery'; import { useOracleDataQuery } from '@/hooks/queries/useOracleDataQuery'; import { useReadOnlyClient } from '@/hooks/useReadOnlyClient'; import type { 
SupportedNetworks } from '@/utils/networks'; @@ -11,6 +13,7 @@ export const useMarketData = (uniqueKey: string | undefined, network: SupportedN const { client, customRpcUrls, rpcConfigVersion } = useReadOnlyClient(network); const queryKey = ['marketData', uniqueKey, network, rpcConfigVersion]; const { getOracleData } = useOracleDataQuery(); + const { data: marketMetadata, error: marketMetadataError, refetch: refetchMarketMetadata } = useMarketMetadataQuery(uniqueKey, network); const { data, isLoading, error, refetch } = useQuery({ queryKey: queryKey, @@ -79,24 +82,27 @@ export const useMarketData = (uniqueKey: string | undefined, network: SupportedN const enrichedMarket = useMemo(() => { if (!data || !network) return data; - const oracleData = getOracleData(data.oracleAddress, network); + const marketWithMetadata = applyMarketMetadata(data, marketMetadata); + const oracleData = getOracleData(marketWithMetadata.oracleAddress, network); if (oracleData) { return { - ...data, + ...marketWithMetadata, oracle: { data: oracleData, }, }; } - return data; - }, [data, network, getOracleData]); + return marketWithMetadata; + }, [data, marketMetadata, network, getOracleData]); return { data: enrichedMarket, isLoading: isLoading, - error: error, - refetch: refetch, + error: error ?? (!data ? marketMetadataError : null), + refetch: async () => { + await Promise.all([refetch(), refetchMarketMetadata()]); + }, }; }; diff --git a/src/hooks/useMarketLiquidations.ts b/src/hooks/useMarketLiquidations.ts index 7bd019a1..4cdfd6c6 100644 --- a/src/hooks/useMarketLiquidations.ts +++ b/src/hooks/useMarketLiquidations.ts @@ -10,7 +10,6 @@ import type { MarketLiquidationTransaction } from '@/utils/types'; // Use simpli * @returns List of liquidation transactions for the market. 
*/ export const useMarketLiquidations = (marketId: string | undefined, network: SupportedNetworks | undefined) => { - // Note: loanAssetId is not needed for liquidations query const queryKey = ['marketLiquidations', marketId, network]; const { data, isLoading, error, refetch } = useQuery({ diff --git a/src/hooks/useMarketSupplies.ts b/src/hooks/useMarketSupplies.ts index 57c2ae78..335d155f 100644 --- a/src/hooks/useMarketSupplies.ts +++ b/src/hooks/useMarketSupplies.ts @@ -5,11 +5,8 @@ import type { SupportedNetworks } from '@/utils/networks'; import type { PaginatedMarketActivityTransactions } from '@/utils/types'; /** - * Hook to fetch supply and withdraw activities for a specific market's loan asset, - * using the appropriate data source based on the network. - * Supports pagination with server-side pagination for Morpho API and client-side for Subgraph. + * Hook to fetch supply and withdraw activities for a specific market. * @param marketId The ID of the market (e.g., 0x...). - * @param loanAssetId The address of the loan asset for the market. * @param network The blockchain network. * @param minAssets Minimum asset amount to filter transactions (optional, defaults to '0'). * @param page Current page number (1-indexed, defaults to 1). 
@@ -18,7 +15,6 @@ import type { PaginatedMarketActivityTransactions } from '@/utils/types'; */ export const useMarketSupplies = ( marketId: string | undefined, - loanAssetId: string | undefined, network: SupportedNetworks | undefined, minAssets = '0', page = 1, @@ -26,24 +22,24 @@ export const useMarketSupplies = ( ) => { const queryClient = useQueryClient(); - const queryKey = ['marketSupplies', marketId, loanAssetId, network, minAssets, page, pageSize]; + const queryKey = ['marketSupplies', marketId, network, minAssets, page, pageSize]; const queryFn = useCallback( async (targetPage: number): Promise => { - if (!marketId || !loanAssetId || !network) { + if (!marketId || !network) { return null; } const targetSkip = (targetPage - 1) * pageSize; - return fetchMarketSupplies(marketId, loanAssetId, network, minAssets, pageSize, targetSkip); + return fetchMarketSupplies(marketId, network, minAssets, pageSize, targetSkip); }, - [marketId, loanAssetId, network, minAssets, pageSize], + [marketId, network, minAssets, pageSize], ); const { data, isLoading, isFetching, error, refetch } = useQuery({ queryKey: queryKey, queryFn: async () => queryFn(page), - enabled: !!marketId && !!loanAssetId && !!network, + enabled: !!marketId && !!network, staleTime: 1000 * 60 * 5, // 5 minutes - keep cached data fresh longer placeholderData: (previousData) => previousData ?? null, retry: 1, @@ -51,12 +47,12 @@ export const useMarketSupplies = ( // Prefetch adjacent pages for faster navigation useEffect(() => { - if (!marketId || !loanAssetId || !network || !data) return; + if (!marketId || !network || !data) return; const totalPages = data.totalCount > 0 ? 
Math.ceil(data.totalCount / pageSize) : 0; if (page > 1) { - const prevPageKey = ['marketSupplies', marketId, loanAssetId, network, minAssets, page - 1, pageSize]; + const prevPageKey = ['marketSupplies', marketId, network, minAssets, page - 1, pageSize]; void queryClient.prefetchQuery({ queryKey: prevPageKey, queryFn: async () => queryFn(page - 1), @@ -65,7 +61,7 @@ export const useMarketSupplies = ( } if (page < totalPages) { - const nextPageKey = ['marketSupplies', marketId, loanAssetId, network, minAssets, page + 1, pageSize]; + const nextPageKey = ['marketSupplies', marketId, network, minAssets, page + 1, pageSize]; void queryClient.prefetchQuery({ queryKey: nextPageKey, queryFn: async () => queryFn(page + 1), diff --git a/src/hooks/usePositionReport.ts b/src/hooks/usePositionReport.ts index 46dddbc0..240f7842 100644 --- a/src/hooks/usePositionReport.ts +++ b/src/hooks/usePositionReport.ts @@ -6,7 +6,7 @@ import { getClient } from '@/utils/rpc'; import { estimateBlockAtTimestamp } from '@/utils/blockEstimation'; import type { Market, MarketPosition, UserTransaction } from '@/utils/types'; import { useCustomRpc } from '@/stores/useCustomRpc'; -import { fetchUserTransactions } from './queries/fetchUserTransactions'; +import { fetchAllUserTransactions } from './queries/fetchUserTransactions'; export type PositionReport = { market: Market; @@ -76,38 +76,23 @@ export const usePositionReport = ( // Fetch ALL transactions for this asset with auto-pagination // Query by assetId to discover all markets (including closed ones) - const PAGE_SIZE = 1000; // Larger page size for report generation - let allTransactions: UserTransaction[] = []; - let hasMore = true; - let skip = 0; - - while (hasMore) { - const transactionResult = await fetchUserTransactions({ + const transactionResult = await fetchAllUserTransactions( + { userAddress: [account], chainId: selectedAsset.chainId, timestampGte: actualStartTimestamp, timestampLte: actualEndTimestamp, assetIds: 
[selectedAsset.address], - first: PAGE_SIZE, - skip, - }); - - if (!transactionResult) { - throw new Error('Failed to fetch transactions'); - } - - allTransactions = [...allTransactions, ...transactionResult.items]; - - // Check if we've fetched all transactions - hasMore = transactionResult.items.length === PAGE_SIZE; - skip += PAGE_SIZE; + }, + { + pageSize: 1000, + }, + ); - // Safety check to prevent infinite loops (50 pages = 50k transactions) - if (skip > PAGE_SIZE * 50) { - console.warn('Reached maximum pagination limit (50k transactions), some data might be missing'); - break; - } + if (transactionResult.error) { + throw new Error(transactionResult.error); } + const allTransactions = transactionResult.items; // Discover unique markets from transactions (includes closed markets) const discoveredMarketIds = [...new Set(allTransactions.map((tx) => tx.data?.market?.uniqueKey).filter((id): id is string => !!id))]; diff --git a/src/hooks/useProcessedMarkets.ts b/src/hooks/useProcessedMarkets.ts index 6cd729ec..e855ff05 100644 --- a/src/hooks/useProcessedMarkets.ts +++ b/src/hooks/useProcessedMarkets.ts @@ -1,12 +1,16 @@ import { useMemo } from 'react'; +import { applyMarketMetadataMap } from '@/data-sources/shared/market-metadata'; import { useMarketsQuery } from '@/hooks/queries/useMarketsQuery'; +import { useMarketsMetadataQuery } from '@/hooks/queries/useMarketsMetadataQuery'; import { useOracleDataQuery } from '@/hooks/queries/useOracleDataQuery'; import { useTokenPrices } from '@/hooks/useTokenPrices'; import { useBlacklistedMarkets } from '@/stores/useBlacklistedMarkets'; import { useAppSettings } from '@/stores/useAppSettings'; import { collectTokenPriceInputsForMarkets, applyTokenPriceResolutionToMarkets } from '@/data-sources/shared/market-usd'; import { isForceUnwhitelisted } from '@/utils/markets'; -import type { Market } from '@/utils/types'; +import type { Market, MarketMetadata } from '@/utils/types'; + +const EMPTY_MARKET_METADATA_MAP = new Map(); 
/** * Processes raw markets data with blacklist filtering and oracle enrichment. @@ -30,6 +34,7 @@ import type { Market } from '@/utils/types'; */ export const useProcessedMarkets = () => { const { data: rawMarketsFromQuery, isLoading, isRefetching, error, refetch } = useMarketsQuery(); + const { data: marketMetadataMap, refetch: refetchMetadata } = useMarketsMetadataQuery(); const { getAllBlacklistedKeys, customBlacklistedMarkets } = useBlacklistedMarkets(); const { getOracleData } = useOracleDataQuery(); const { showUnwhitelistedMarkets } = useAppSettings(); @@ -94,19 +99,29 @@ export const useProcessedMarkets = () => { return allMarketsWithUsd.filter((market) => market.whitelisted); }, [allMarketsWithUsd]); + const allMarketsWithMetadata = useMemo(() => { + return applyMarketMetadataMap(allMarketsWithUsd, marketMetadataMap ?? EMPTY_MARKET_METADATA_MAP); + }, [allMarketsWithUsd, marketMetadataMap]); + + const whitelistedMarketsWithMetadata = useMemo(() => { + return allMarketsWithMetadata.filter((market) => market.whitelisted); + }, [allMarketsWithMetadata]); + // Computed markets based on showUnwhitelistedMarkets setting (for backward compatibility) const markets = useMemo(() => { - return showUnwhitelistedMarkets ? allMarketsWithUsd : whitelistedMarketsWithUsd; - }, [showUnwhitelistedMarkets, allMarketsWithUsd, whitelistedMarketsWithUsd]); + return showUnwhitelistedMarkets ? 
allMarketsWithMetadata : whitelistedMarketsWithMetadata; + }, [showUnwhitelistedMarkets, allMarketsWithMetadata, whitelistedMarketsWithMetadata]); return { ...processedData, - allMarkets: allMarketsWithUsd, - whitelistedMarkets: whitelistedMarketsWithUsd, + allMarkets: allMarketsWithMetadata, + whitelistedMarkets: whitelistedMarketsWithMetadata, markets, // Computed from setting (backward compatible with old context) loading: isLoading, isRefetching, error, - refetch, + refetch: async () => { + await Promise.all([refetch(), refetchMetadata()]); + }, }; }; diff --git a/src/hooks/useUserPosition.ts b/src/hooks/useUserPosition.ts index 22201798..b028163a 100644 --- a/src/hooks/useUserPosition.ts +++ b/src/hooks/useUserPosition.ts @@ -12,7 +12,7 @@ import { useProcessedMarkets } from './useProcessedMarkets'; * Hook to fetch a user's position in a specific market. * * Prioritizes the latest on-chain snapshot via `fetchPositionSnapshot`. - * Falls back to the configured data source (Morpho API or Subgraph) if the snapshot is unavailable. + * Falls back to the indexed position adapter when the snapshot is unavailable. * * @param user The user's address. * @param chainId The network ID. @@ -75,8 +75,8 @@ const useUserPosition = (user: string | undefined, chainId: SupportedNetworks | }, }; } else { - // Local market data NOT found, need to fetch from fallback to get structure - console.warn(`Local market data not found for ${marketKey}. Fetching from fallback source to combine with snapshot.`); + // Local market data NOT found, fetch from the indexed adapter to hydrate the position shape. + console.warn(`Local market data not found for ${marketKey}. 
Fetching indexed position data to combine with snapshot.`); const fallbackPosition = await fetchUserPositionForMarket(marketKey, user, chainId, { customRpcUrls, }); diff --git a/src/hooks/useUserPositionsSummaryData.ts b/src/hooks/useUserPositionsSummaryData.ts index cf7ca6df..9da7eadb 100644 --- a/src/hooks/useUserPositionsSummaryData.ts +++ b/src/hooks/useUserPositionsSummaryData.ts @@ -1,6 +1,7 @@ import { useEffect, useMemo } from 'react'; import { useQueryClient } from '@tanstack/react-query'; import { estimateBlockAtTimestamp } from '@/utils/blockEstimation'; +import { getChainScopedMarketKey } from '@/utils/marketIdentity'; import type { SupportedNetworks } from '@/utils/networks'; import useUserPositions, { positionKeys } from './useUserPositions'; import { useCurrentBlocks } from './queries/useCurrentBlocks'; @@ -66,18 +67,31 @@ const useUserPositionsSummaryData = (user: string | undefined, period: EarningsP mergeUserTransactionsWithRecentCache({ userAddress: user, chainIds: uniqueChainIds, - apiTransactions: txData?.items ?? [], + apiTransactions: txData?.items.filter((tx) => + positions?.some( + (position) => + getChainScopedMarketKey(position.market.uniqueKey, position.market.morphoBlue.chain.id) === + getChainScopedMarketKey(tx.data.market.uniqueKey, tx.chainId), + ), + ) ?? [], }), - [user, uniqueChainIds, txData?.items], + [user, uniqueChainIds, txData?.items, positions], ); useEffect(() => { reconcileUserTransactionHistoryCache({ userAddress: user, chainIds: uniqueChainIds, - apiTransactions: txData?.items ?? [], + apiTransactions: + txData?.items.filter((tx) => + positions?.some( + (position) => + getChainScopedMarketKey(position.market.uniqueKey, position.market.morphoBlue.chain.id) === + getChainScopedMarketKey(tx.data.market.uniqueKey, tx.chainId), + ), + ) ?? 
[], }); - }, [user, uniqueChainIds, txData?.items]); + }, [user, uniqueChainIds, txData?.items, positions]); const { data: allSnapshots, diff --git a/src/utils/subgraph-types.ts b/src/utils/subgraph-types.ts deleted file mode 100644 index 4eb182b4..00000000 --- a/src/utils/subgraph-types.ts +++ /dev/null @@ -1,81 +0,0 @@ -import type { Address } from 'viem'; - -export type SubgraphToken = { - id: Address; - name: string; - symbol: string; - decimals: number; - lastPriceUSD: string | null; -}; - -export type SubgraphOracle = { - id: string; - oracleAddress: Address; - oracleSource: string | null; - isActive: boolean; - isUSD: boolean; -}; - -export type SubgraphInterestRate = { - id: string; - rate: string; - side: 'LENDER' | 'BORROWER'; - type: 'STABLE' | 'VARIABLE' | 'FIXED'; -}; - -export type SubgraphProtocolInfo = { - id: string; - network: string; - protocol: string; -}; - -export type SubgraphMarket = { - id: Address; - name: string; - isActive: boolean; - canBorrowFrom: boolean; - canUseAsCollateral: boolean; - maximumLTV: string; - liquidationThreshold: string; - liquidationPenalty: string; - createdTimestamp: string; - createdBlockNumber: string; - lltv: string; - irm: Address; - inputToken: SubgraphToken; - borrowedToken: SubgraphToken; - variableBorrowedTokenBalance: string | null; - inputTokenBalance: string; - - totalValueLockedUSD: string; - totalDepositBalanceUSD: string; - totalBorrowBalanceUSD: string; - totalSupplyShares: string; - totalBorrowShares: string; - - totalSupply: string; - totalBorrow: string; - totalCollateral: string; - - lastUpdate: string; - reserves: string; - reserveFactor: string; - fee: string; - oracle: SubgraphOracle; - rates: SubgraphInterestRate[]; - protocol: SubgraphProtocolInfo; -}; - -export type SubgraphMarketsQueryResponse = { - data: { - markets: SubgraphMarket[]; - }; - errors?: { message: string }[]; -}; - -export type SubgraphMarketQueryResponse = { - data: { - market: SubgraphMarket | null; - }; - errors?: { 
message: string }[]; -}; diff --git a/src/utils/subgraph-urls.ts b/src/utils/subgraph-urls.ts deleted file mode 100644 index 73219379..00000000 --- a/src/utils/subgraph-urls.ts +++ /dev/null @@ -1,42 +0,0 @@ -import { SupportedNetworks } from './networks'; - -const apiKey = process.env.NEXT_PUBLIC_THEGRAPH_API_KEY; - -// Ensure the API key is available -if (!apiKey) { - console.error('NEXT_PUBLIC_THEGRAPH_API_KEY is not set in environment variables.'); -} - -const baseSubgraphUrl = apiKey - ? `https://gateway.thegraph.com/api/${apiKey}/subgraphs/id/71ZTy1veF9twER9CLMnPWeLQ7GZcwKsjmygejrgKirqs` - : undefined; - -const mainnetSubgraphUrl = apiKey - ? `https://gateway.thegraph.com/api/${apiKey}/subgraphs/id/8Lz789DP5VKLXumTMTgygjU2xtuzx8AhbaacgN5PYCAs` - : undefined; - -const polygonSubgraphUrl = apiKey - ? `https://gateway.thegraph.com/api/${apiKey}/subgraphs/id/EhFokmwryNs7qbvostceRqVdjc3petuD13mmdUiMBw8Y` - : undefined; - -const unichainSubgraphUrl = apiKey - ? `https://gateway.thegraph.com/api/${apiKey}/subgraphs/id/ESbNRVHte3nwhcHveux9cK4FFAZK3TTLc5mKQNtpYgmu` - : undefined; - -const arbitrumSubgraph = apiKey - ? 
`https://gateway.thegraph.com/api/${apiKey}/subgraphs/id/XsJn88DNCHJ1kgTqYeTgHMQSK4LuG1LR75339QVeQ26` - : undefined; - -// Map network IDs (from SupportedNetworks) to Subgraph URLs -// Note: HyperEVM goldsky subgraph was removed (endpoint deleted) -export const SUBGRAPH_URLS: Partial> = { - [SupportedNetworks.Base]: baseSubgraphUrl, - [SupportedNetworks.Mainnet]: mainnetSubgraphUrl, - [SupportedNetworks.Polygon]: polygonSubgraphUrl, - [SupportedNetworks.Unichain]: unichainSubgraphUrl, - [SupportedNetworks.Arbitrum]: arbitrumSubgraph, -}; - -export const getSubgraphUrl = (network: SupportedNetworks): string | undefined => { - return SUBGRAPH_URLS[network]; -}; diff --git a/src/utils/types.ts b/src/utils/types.ts index 64c90dc7..4e1fe694 100644 --- a/src/utils/types.ts +++ b/src/utils/types.ts @@ -41,6 +41,7 @@ export enum UserTxTypes { } export type UserTransaction = { + chainId: number; hash: string; timestamp: number; type: UserTxTypes; @@ -92,6 +93,15 @@ export type MarketWarning = { __typename: string; }; +export type MarketMetadata = { + uniqueKey: string; + chainId: SupportedNetworks; + warnings: MarketWarning[]; + supplyingVaults: { + address: string; + }[]; +}; + export type WarningWithDetail = { code: string; description: string; diff --git a/src/utils/user-transaction-history-cache.ts b/src/utils/user-transaction-history-cache.ts index 8147898a..696a2fd2 100644 --- a/src/utils/user-transaction-history-cache.ts +++ b/src/utils/user-transaction-history-cache.ts @@ -34,7 +34,7 @@ const getTransactionDedupKey = (transaction: UserTransaction): string => { const marketKey = transaction.data?.market?.uniqueKey?.toLowerCase() ?? ''; const assets = transaction.data?.assets ?? '0'; const shares = transaction.data?.shares ?? 
'0'; - return `${transaction.hash.toLowerCase()}:${transaction.type}:${marketKey}:${assets}:${shares}`; + return `${transaction.chainId}:${transaction.hash.toLowerCase()}:${transaction.type}:${marketKey}:${assets}:${shares}`; }; const getCacheEntryDedupKey = (entry: CachedUserTransactionEntry): string => @@ -51,6 +51,7 @@ const isCacheEntry = (value: unknown): value is CachedUserTransactionEntry => { typeof candidate.logIndex === 'number' && !!candidate.tx && typeof candidate.tx.hash === 'string' && + typeof candidate.tx.chainId === 'number' && typeof candidate.tx.timestamp === 'number' && typeof candidate.tx.type === 'string' && !!candidate.tx.data && @@ -158,6 +159,7 @@ export function cacheUserTransactionHistoryFromReceipt({ expiresAt, logIndex: log.logIndex ?? index, tx: { + chainId, hash: txHash, timestamp, type: txType, From 3c0364d969a1acd430b66bf58afb1e5e134303c7 Mon Sep 17 00:00:00 2001 From: antoncoding Date: Sun, 15 Mar 2026 00:40:05 +0800 Subject: [PATCH 5/5] chore: review fixes --- AGENTS.md | 2 +- src/data-sources/envio/market-participants.ts | 93 +++++++++++++------ src/data-sources/envio/market.ts | 10 +- src/data-sources/market-catalog.ts | 29 ++++-- src/data-sources/morpho-api/market.ts | 2 +- .../shared/historical-chain-context.ts | 6 +- .../transaction-history-preview.tsx | 2 +- src/graphql/envio-queries.ts | 20 ++++ src/hooks/queries/fetchUserTransactions.ts | 35 ++++++- 9 files changed, 149 insertions(+), 50 deletions(-) diff --git a/AGENTS.md b/AGENTS.md index 33b3d43d..731b381f 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -174,7 +174,7 @@ When touching transaction and position flows, validation MUST include all releva 38. **RPC configuration reactivity integrity**: any query or cache that depends on the active custom RPC selection must key or invalidate at the exact RPC-dependent layer (for example snapshot/enrichment queries), while RPC-independent discovery queries must not churn on RPC changes. 
Custom-RPC switching must not leave position or market state pinned to data fetched through the previous endpoint. 39. **Sparse market contract parity**: any fallback or single-entity market/position path that starts from sparse source data must hydrate missing market fields through the shared market-detail/catalog enrichment chokepoints before the result reaches shared UI consumers. Do not let raw fallback markets bypass shared USD, target-rate, blacklist, or chain-scoped identity normalization. 40. **Indexer market pre-hydration integrity**: shared market-catalog/indexer adapters must exclude structurally invalid markets (for example zero-address IRM or collateral token, plus local blacklist gates) before token-metadata hydration or enrichment begins, and source logs must distinguish raw fetch completion from downstream enrichment so slow stages are attributable. -41. **Pagination completeness integrity**: shared pagination utilities must validate positive page sizes/non-negative limits, and any paged market/participant/history source must either fetch complete results or fail closed to fallback. Do not silently cap user-visible datasets at arbitrary ceilings or treat incomplete/invalid pages as successful partial data. +41. **Pagination completeness integrity**: shared pagination utilities must validate positive page sizes/non-negative limits, and any paged market/participant/history source must either fetch complete results or fail closed to fallback. Paged adapters must honor the requested window at the source and return an authoritative total count; do not fetch-all-then-slice in memory, silently cap user-visible datasets at arbitrary ceilings, or treat incomplete/invalid pages as successful partial data. 42. **Indexed empty-result integrity**: shared indexer-backed market/position/transaction adapters must distinguish a legitimate empty domain result from an internal hydration/filtering failure. 
Empty arrays/zero counts are authoritative only when the adapter completed without downstream mapping, metadata, or market-hydration errors; otherwise throw and let the shared fallback layer decide. 43. **Cross-chain adapter ownership integrity**: shared market/position/history adapters must own chain fan-out, pagination, and source fallback internally. Feature hooks and UI call sites must request the needed chain scope, not reintroduce per-chain loops, parallel legacy-source requests, or source-specific branching above the adapter boundary. diff --git a/src/data-sources/envio/market-participants.ts b/src/data-sources/envio/market-participants.ts index ff65d141..2655aacb 100644 --- a/src/data-sources/envio/market-participants.ts +++ b/src/data-sources/envio/market-participants.ts @@ -1,12 +1,15 @@ -import { envioMarketBorrowersQuery, envioMarketSuppliersQuery } from '@/graphql/envio-queries'; +import { + envioMarketBorrowersCountQuery, + envioMarketBorrowersQuery, + envioMarketSuppliersCountQuery, + envioMarketSuppliersQuery, +} from '@/graphql/envio-queries'; import type { SupportedNetworks } from '@/utils/networks'; import type { MarketBorrower, MarketSupplier, PaginatedMarketBorrowers, PaginatedMarketSuppliers } from '@/utils/types'; import { fetchEnvioMarket } from './market'; import { envioGraphqlFetcher } from './fetchers'; -import { fetchAllEnvioPages, normalizeEnvioString } from './utils'; +import { normalizeEnvioString } from './utils'; -const ENVIO_PARTICIPANTS_PAGE_SIZE = 500; -const ENVIO_PARTICIPANTS_MAX_ITEMS = Number.MAX_SAFE_INTEGER; const ENVIO_PARTICIPANTS_TIMEOUT_MS = 15_000; type EnvioSupplierRow = { @@ -28,6 +31,16 @@ type EnvioParticipantsResponse = { }; }; +type EnvioParticipantsCountResponse = { + data?: { + Position_aggregate?: { + aggregate?: { + count?: number | null; + } | null; + } | null; + }; +}; + const toAssets = (shares: string, totalAssets: string, totalShares: string): string => { try { const parsedShares = BigInt(shares); @@ 
-70,6 +83,26 @@ const fetchPositionRows = async ; +}): Promise => { + const response = await envioGraphqlFetcher( + query, + { + where, + }, + { + timeoutMs: ENVIO_PARTICIPANTS_TIMEOUT_MS, + }, + ); + + return response.data?.Position_aggregate?.aggregate?.count ?? 0; +}; + export const fetchEnvioMarketSuppliers = async ( marketId: string, chainId: SupportedNetworks, @@ -89,17 +122,18 @@ export const fetchEnvioMarketSuppliers = async ( }, }; - const suppliers = await fetchAllEnvioPages({ - fetchPage: async (limit, offset) => - fetchPositionRows({ - limit, - offset, - query: envioMarketSuppliersQuery, - where, - }), - maxItems: ENVIO_PARTICIPANTS_MAX_ITEMS, - pageSize: ENVIO_PARTICIPANTS_PAGE_SIZE, - }); + const [suppliers, totalCount] = await Promise.all([ + fetchPositionRows({ + limit: pageSize, + offset: skip, + query: envioMarketSuppliersQuery, + where, + }), + fetchPositionCount({ + query: envioMarketSuppliersCountQuery, + where, + }), + ]); const items: MarketSupplier[] = suppliers.map((supplier) => ({ supplyShares: normalizeEnvioString(supplier.supplyShares), @@ -107,8 +141,8 @@ export const fetchEnvioMarketSuppliers = async ( })); return { - items: items.slice(skip, skip + pageSize), - totalCount: items.length, + items, + totalCount, }; }; @@ -131,18 +165,17 @@ export const fetchEnvioMarketBorrowers = async ( }, }; - const [market, borrowers] = await Promise.all([ + const [market, borrowers, totalCount] = await Promise.all([ fetchEnvioMarket(marketId, chainId), - fetchAllEnvioPages({ - fetchPage: async (limit, offset) => - fetchPositionRows({ - limit, - offset, - query: envioMarketBorrowersQuery, - where, - }), - maxItems: ENVIO_PARTICIPANTS_MAX_ITEMS, - pageSize: ENVIO_PARTICIPANTS_PAGE_SIZE, + fetchPositionRows({ + limit: pageSize, + offset: skip, + query: envioMarketBorrowersQuery, + where, + }), + fetchPositionCount({ + query: envioMarketBorrowersCountQuery, + where, }), ]); @@ -161,7 +194,7 @@ export const fetchEnvioMarketBorrowers = async ( }); return { 
- items: items.slice(skip, skip + pageSize), - totalCount: items.length, + items, + totalCount, }; }; diff --git a/src/data-sources/envio/market.ts b/src/data-sources/envio/market.ts index 0bcfdb8b..030c7d2c 100644 --- a/src/data-sources/envio/market.ts +++ b/src/data-sources/envio/market.ts @@ -339,7 +339,11 @@ export const fetchEnvioMarketsByKeys = async ( const uniqueRequests = new Map(); for (const marketRequest of marketRequests) { - uniqueRequests.set(getChainScopedMarketKey(marketRequest.marketUniqueKey, marketRequest.chainId), marketRequest); + const canonicalMarketUniqueKey = marketRequest.marketUniqueKey.toLowerCase(); + uniqueRequests.set(getChainScopedMarketKey(canonicalMarketUniqueKey, marketRequest.chainId), { + ...marketRequest, + marketUniqueKey: canonicalMarketUniqueKey, + }); } const rows = await fetchEnvioMarketsPage({ @@ -371,10 +375,10 @@ export const fetchEnvioMarket = async ( ): Promise => { const marketMap = await fetchEnvioMarketsByKeys([ { - marketUniqueKey: uniqueKey, + marketUniqueKey: uniqueKey.toLowerCase(), chainId, }, ], options); - return marketMap.get(getChainScopedMarketKey(uniqueKey, chainId)) ?? null; + return marketMap.get(getChainScopedMarketKey(uniqueKey.toLowerCase(), chainId)) ?? 
null; }; diff --git a/src/data-sources/market-catalog.ts b/src/data-sources/market-catalog.ts index b9841cfb..38c1ca4b 100644 --- a/src/data-sources/market-catalog.ts +++ b/src/data-sources/market-catalog.ts @@ -5,6 +5,7 @@ import { toIndexedMarket } from '@/data-sources/shared/market-metadata'; import { filterTokenBlacklistedMarkets } from '@/data-sources/shared/market-visibility'; import { enrichMarketsWithHistoricalApysWithinTimeout } from '@/data-sources/shared/market-rate-enrichment'; import { enrichMarketsWithTargetRate } from '@/data-sources/shared/market-target-rate-enrichment'; +import { fillMissingMarketUsdValues } from '@/data-sources/shared/market-usd'; import { getErrorMessage, logDataSourceEvent } from '@/data-sources/shared/source-debug'; import type { CustomRpcUrls } from '@/stores/useCustomRpc'; import { ALL_SUPPORTED_NETWORKS, type SupportedNetworks } from '@/utils/networks'; @@ -32,7 +33,8 @@ const withTimeout = async (promise: Promise, timeoutMs: number, label: str }; const enrichCatalogMarkets = async (markets: Market[], customRpcUrls?: CustomRpcUrls): Promise => { - const marketsWithTargetRate = await enrichMarketsWithTargetRate(markets, { + const marketsWithUsd = await fillMissingMarketUsdValues(markets); + const marketsWithTargetRate = await enrichMarketsWithTargetRate(marketsWithUsd, { customRpcUrls, }); @@ -45,15 +47,26 @@ const enrichCatalogMarketsWithLogging = async ( details: Record, ): Promise => { const enrichmentStartedAt = Date.now(); - const enrichedMarkets = await enrichCatalogMarkets(markets, customRpcUrls); + try { + const enrichedMarkets = await enrichCatalogMarkets(markets, customRpcUrls); - logDataSourceEvent('market-catalog', 'market enrichment completed', { - ...details, - count: enrichedMarkets.length, - durationMs: Date.now() - enrichmentStartedAt, - }); + logDataSourceEvent('market-catalog', 'market enrichment completed', { + ...details, + count: enrichedMarkets.length, + durationMs: Date.now() - enrichmentStartedAt, + 
}); - return enrichedMarkets; + return enrichedMarkets; + } catch (error) { + logDataSourceEvent('market-catalog', 'market enrichment failed, using base catalog', { + ...details, + count: markets.length, + durationMs: Date.now() - enrichmentStartedAt, + reason: getErrorMessage(error), + }); + + return markets; + } }; export const fetchMarketCatalog = async ( diff --git a/src/data-sources/morpho-api/market.ts b/src/data-sources/morpho-api/market.ts index cb6c58b7..75bb6604 100644 --- a/src/data-sources/morpho-api/market.ts +++ b/src/data-sources/morpho-api/market.ts @@ -64,7 +64,7 @@ export const fetchMorphoMarket = async (uniqueKey: string, network: SupportedNet return processMarketData(response.data.marketByUniqueKey); }; -const fetchMorphoMarketsPage = async (network: SupportedNetworks, skip: number, pageSize: number): Promise => { +const fetchMorphoMarketsPage = async (network: SupportedNetworks, skip: number, pageSize: number): Promise => { return fetchMorphoMarketsPageForChains([network], skip, pageSize); }; diff --git a/src/data-sources/shared/historical-chain-context.ts b/src/data-sources/shared/historical-chain-context.ts index 5979889f..4091d0a6 100644 --- a/src/data-sources/shared/historical-chain-context.ts +++ b/src/data-sources/shared/historical-chain-context.ts @@ -51,6 +51,10 @@ export const fetchHistoricalChainContext = async ({ targetTimestamps?: number[]; timeoutMs?: number; }): Promise => { + if ((targetLookbackSeconds?.length ?? 0) > 0 && (targetTimestamps?.length ?? 0) > 0) { + throw new Error('Provide either targetLookbackSeconds or targetTimestamps, not both.'); + } + const targetSignature = targetLookbackSeconds && targetLookbackSeconds.length > 0 ? `lookback:${targetLookbackSeconds.join(',')}` : `ts:${(targetTimestamps ?? []).join(',')}`; const cacheKey = `${chainId}:${targetSignature}:${timeoutMs}`; @@ -116,7 +120,7 @@ export const fetchHistoricalChainContext = async ({ const nextCachedByClient = cachedByClient ?? 
new Map(); nextCachedByClient.set(cacheKey, { - expiresAt: now + CHAIN_CONTEXT_CACHE_TTL_MS, + expiresAt: now + Math.max(CHAIN_CONTEXT_CACHE_TTL_MS, timeoutMs), promise: requestPromise, }); historicalChainContextCache.set(client, nextCachedByClient); diff --git a/src/features/history/components/transaction-history-preview.tsx b/src/features/history/components/transaction-history-preview.tsx index 22409d74..6a9f4a3a 100644 --- a/src/features/history/components/transaction-history-preview.tsx +++ b/src/features/history/components/transaction-history-preview.tsx @@ -138,7 +138,7 @@ export function TransactionHistoryPreview({ ) : ( history.map((group) => { - const chainIdForTx = chainId ?? group.transactions[0]?.chainId; + const chainIdForTx = group.transactions[0]?.chainId ?? chainId; // Handle rebalances if (group.isMetaAction && group.metaActionType === 'rebalance') { diff --git a/src/graphql/envio-queries.ts b/src/graphql/envio-queries.ts index a822564d..0c334311 100644 --- a/src/graphql/envio-queries.ts +++ b/src/graphql/envio-queries.ts @@ -90,6 +90,16 @@ export const envioMarketSuppliersQuery = ` } `; +export const envioMarketSuppliersCountQuery = ` + query EnvioMarketSuppliersCount($where: Position_bool_exp) { + Position_aggregate(where: $where) { + aggregate { + count + } + } + } +`; + export const envioMarketBorrowersQuery = ` query EnvioMarketBorrowers($limit: Int!, $offset: Int!, $where: Position_bool_exp) { Position(limit: $limit, offset: $offset, where: $where, order_by: [{ borrowShares: desc }, { user: asc }]) { @@ -102,6 +112,16 @@ export const envioMarketBorrowersQuery = ` } `; +export const envioMarketBorrowersCountQuery = ` + query EnvioMarketBorrowersCount($where: Position_bool_exp) { + Position_aggregate(where: $where) { + aggregate { + count + } + } + } +`; + export const envioSupplyEventsQuery = ` query EnvioSupplyEvents($limit: Int!, $offset: Int!, $where: Morpho_Supply_bool_exp) { Morpho_Supply(limit: $limit, offset: $offset, where: $where, 
order_by: [{ timestamp: desc }, { id: desc }]) { diff --git a/src/hooks/queries/fetchUserTransactions.ts b/src/hooks/queries/fetchUserTransactions.ts index 52e98da4..2e553c38 100644 --- a/src/hooks/queries/fetchUserTransactions.ts +++ b/src/hooks/queries/fetchUserTransactions.ts @@ -29,6 +29,11 @@ export type TransactionResponse = { error: string | null; }; +const getUserTransactionDedupKey = (transaction: UserTransaction): string => { + const marketKey = transaction.data.market.uniqueKey.toLowerCase(); + return `${transaction.chainId}:${transaction.hash.toLowerCase()}:${transaction.type}:${marketKey}:${transaction.data.assets}:${transaction.data.shares}`; +}; + const resolveTransactionChainIds = (filters: TransactionFilters): number[] => { const chainIds = filters.chainIds ?? (filters.chainId != null ? [filters.chainId] : []); return [...new Set(chainIds)]; @@ -94,6 +99,8 @@ export async function fetchAllUserTransactions( const pageSize = options.pageSize ?? 1000; const maxPages = options.maxPages ?? 
50; const items: UserTransaction[] = []; + let expectedTotalCount: number | null = null; + let rawFetchedCount = 0; for (let page = 0; page < maxPages; page += 1) { const response = await fetchUserTransactions({ @@ -107,19 +114,37 @@ export async function fetchAllUserTransactions( } items.push(...response.items); + rawFetchedCount += response.items.length; + expectedTotalCount ??= response.pageInfo.countTotal; - if (response.items.length < pageSize) { + if (response.items.length === 0 || rawFetchedCount >= expectedTotalCount) { break; } } - items.sort((left, right) => right.timestamp - left.timestamp); + if (expectedTotalCount != null && rawFetchedCount < expectedTotalCount) { + return { + items: [], + pageInfo: { count: 0, countTotal: expectedTotalCount }, + error: `Transaction pagination hit the maxPages limit (${maxPages}) before completion.`, + }; + } + + const dedupedItems = new Map(); + for (const item of items) { + const dedupKey = getUserTransactionDedupKey(item); + if (!dedupedItems.has(dedupKey)) { + dedupedItems.set(dedupKey, item); + } + } + + const sortedItems = Array.from(dedupedItems.values()).sort((left, right) => right.timestamp - left.timestamp); return { - items, + items: sortedItems, pageInfo: { - count: items.length, - countTotal: items.length, + count: sortedItems.length, + countTotal: sortedItems.length, }, error: null, };