diff --git a/AGENTS.md b/AGENTS.md
index acb48c02..0dac92db 100644
--- a/AGENTS.md
+++ b/AGENTS.md
@@ -176,8 +176,9 @@ When touching transaction and position flows, validation MUST include all releva
38. **Morpho vault query schema integrity**: shared Morpho vault metadata/rate queries must only request fields confirmed on the live `Vault`/`VaultState` schema. Do not add speculative top-level fields to the registry query, and do not swallow schema errors in a way that turns the global vault registry into an empty success state.
39. **User position discovery integrity**: when a shared upstream supports chain-scoped bulk position discovery (`userAddress_in` plus `chainId_in` or equivalent), use that batched chokepoint to collect position market keys before falling back to per-chain queries. Do not force one `userByAddress` request per chain when the backend can already return mixed-chain positions in one response.
40. **Source-discovery failure integrity**: market/position source-discovery hooks must fail closed when both primary and fallback providers fail for a chain. Do not convert dual-source fetch failures into empty success states; surface typed errors with source and network metadata so callers can fall back explicitly or show the failure.
-41. **Oracle metadata source integrity**: oracle vendor/type/feed classification must resolve from the scanner metadata source keyed by `chainId + oracleAddress`. Do not reintroduce Morpho API `oracles` feed enrichment into market objects or UI/filter/warning logic as a fallback source for oracle structure.
-42. **Mixed oracle badge signal integrity**: when a standard or meta oracle contains both classified feeds and unknown/unverified feeds, vendor badges and their tooltips must preserve both signals together (known vendor icon(s) plus unknown indicator/text) instead of collapsing to only the recognized vendor.
+41. **Envio market-detail read integrity**: when Envio backs market-detail participants or activity tables, share-to-asset conversions must use the already-loaded live market state keyed by `chainId + market.uniqueKey` instead of a second indexer totals query; participant caches must not store state-derived converted values behind a key that ignores the live state; event/liquidation tables must fetch only the requested page window with correct merged ordering rather than scanning full market history unless the UI explicitly requires an exact total count; provider fallbacks must page at the provider boundary or fail closed with typed source/network errors instead of fetching full history and slicing client-side or returning empty success on missing subgraph configuration; unknown-total pagination must use an explicit open-ended `hasNextPage` mode instead of synthesizing a moving “last page”; and page transitions in that mode must start from a neutral loading state rather than reusing stale rows from the previous page.
+42. **Oracle metadata source integrity**: oracle vendor/type/feed classification must resolve from the scanner metadata source keyed by `chainId + oracleAddress`. Do not reintroduce Morpho API `oracles` feed enrichment into market objects or UI/filter/warning logic as a fallback source for oracle structure.
+43. **Mixed oracle badge signal integrity**: when a standard or meta oracle contains both classified feeds and unknown/unverified feeds, vendor badges and their tooltips must preserve both signals together (known vendor icon(s) plus unknown indicator/text) instead of collapsing to only the recognized vendor.
### REQUIRED: Regression Rule Capture
diff --git a/src/components/shared/table-pagination.tsx b/src/components/shared/table-pagination.tsx
index d6d1316a..53e508fc 100644
--- a/src/components/shared/table-pagination.tsx
+++ b/src/components/shared/table-pagination.tsx
@@ -8,9 +8,8 @@ import { TooltipContent } from '@/components/shared/tooltip-content';
import { Button } from '@/components/ui/button';
import { cn } from '@/utils';
-type TablePaginationProps = {
+type TablePaginationBaseProps = {
currentPage: number;
- totalPages: number;
totalEntries: number;
pageSize: number;
onPageChange: (page: number) => void;
@@ -18,22 +17,41 @@ type TablePaginationProps = {
showEntryCount?: boolean;
};
+type FixedPaginationProps = {
+ mode: 'fixed';
+ totalPages: number;
+};
+
+type OpenPaginationProps = {
+ mode: 'open';
+ hasNextPage: boolean;
+};
+
+type TablePaginationProps = TablePaginationBaseProps & (FixedPaginationProps | OpenPaginationProps);
+
type PaginationToken = number | 'ellipsis';
export function TablePagination({
currentPage,
- totalPages,
totalEntries,
pageSize,
onPageChange,
isLoading = false,
showEntryCount = true,
+ ...paginationProps
}: TablePaginationProps) {
const [jumpPage, setJumpPage] = useState('');
const [isJumpOpen, setIsJumpOpen] = useState(false);
+ const isOpenEnded = paginationProps.mode === 'open';
+ const hasNextPage = isOpenEnded ? (paginationProps.hasNextPage ?? false) : false;
+ const effectiveTotalPages = isOpenEnded ? currentPage + Number(hasNextPage) : paginationProps.totalPages;
// Early return after all hooks
- if (totalPages === 0) {
+ if (isOpenEnded && currentPage === 1 && !hasNextPage && totalEntries === 0) {
+ return null;
+ }
+
+ if (effectiveTotalPages === 0) {
return null;
}
@@ -42,7 +60,7 @@ export function TablePagination({
const handleJumpToPage = () => {
const page = Number.parseInt(jumpPage, 10);
- if (page >= 1 && page <= totalPages) {
+ if (page >= 1 && page <= effectiveTotalPages) {
onPageChange(page);
setJumpPage('');
setIsJumpOpen(false);
@@ -57,24 +75,48 @@ export function TablePagination({
// Generate fixed-length page numbers to keep controls spatially stable.
const getPageNumbers = (): PaginationToken[] => {
+ if (isOpenEnded) {
+ const pages: PaginationToken[] = [];
+
+ if (currentPage > 1) {
+ pages.push(currentPage - 1);
+ }
+
+ pages.push(currentPage);
+
+ if (hasNextPage) {
+ pages.push(currentPage + 1);
+ }
+
+ return pages;
+ }
+
const pages: PaginationToken[] = [];
- if (totalPages <= 7) {
- for (let i = 1; i <= totalPages; i++) {
+ if (effectiveTotalPages <= 7) {
+ for (let i = 1; i <= effectiveTotalPages; i++) {
pages.push(i);
}
return pages;
}
if (currentPage <= 4) {
- return [1, 2, 3, 4, 5, 'ellipsis', totalPages];
+ return [1, 2, 3, 4, 5, 'ellipsis', effectiveTotalPages];
}
- if (currentPage >= totalPages - 3) {
- return [1, 'ellipsis', totalPages - 4, totalPages - 3, totalPages - 2, totalPages - 1, totalPages];
+ if (currentPage >= effectiveTotalPages - 3) {
+ return [
+ 1,
+ 'ellipsis',
+ effectiveTotalPages - 4,
+ effectiveTotalPages - 3,
+ effectiveTotalPages - 2,
+ effectiveTotalPages - 1,
+ effectiveTotalPages,
+ ];
}
- return [1, 'ellipsis', currentPage - 1, currentPage, currentPage + 1, 'ellipsis', totalPages];
+ return [1, 'ellipsis', currentPage - 1, currentPage, currentPage + 1, 'ellipsis', effectiveTotalPages];
};
const getItemKey = (page: PaginationToken, idx: number) => {
@@ -86,7 +128,7 @@ export function TablePagination({
const pageNumbers = getPageNumbers();
- const paginationControlWidthClass = totalPages > 1000 ? 'w-10 text-xs' : 'w-8 text-sm';
+ const paginationControlWidthClass = effectiveTotalPages > 1000 ? 'w-10 text-xs' : 'w-8 text-sm';
return (
@@ -136,7 +178,7 @@ export function TablePagination({
{/* Jump to page - only show if more than 10 pages */}
- {totalPages > 10 && (
+ {!isOpenEnded && effectiveTotalPages > 10 && (
}
/>
}
@@ -179,7 +221,7 @@ export function TablePagination({
size="sm"
type="number"
min={1}
- max={totalPages}
+ max={effectiveTotalPages}
value={jumpPage}
onChange={(e) => setJumpPage(e.target.value)}
onKeyPress={handleKeyPress}
diff --git a/src/data-sources/envio/fetchers.ts b/src/data-sources/envio/fetchers.ts
new file mode 100644
index 00000000..ccbfb67c
--- /dev/null
+++ b/src/data-sources/envio/fetchers.ts
@@ -0,0 +1,8 @@
+import { monarchGraphqlFetcher } from '@/data-sources/monarch-api/fetchers';
+
+/**
+ * Envio-backed Monarch GraphQL currently shares the Monarch API endpoint.
+ * Keep a dedicated fetcher alias so market-detail hooks can depend on an
+ * Envio chokepoint without coupling to vault-specific naming.
+ */
+export const envioGraphqlFetcher = monarchGraphqlFetcher;
diff --git a/src/data-sources/envio/market-detail.ts b/src/data-sources/envio/market-detail.ts
new file mode 100644
index 00000000..c5962f09
--- /dev/null
+++ b/src/data-sources/envio/market-detail.ts
@@ -0,0 +1,368 @@
+import { envioGraphqlFetcher } from './fetchers';
+import {
+ envioBorrowersPageQuery,
+ envioBorrowRepayPageQuery,
+ envioLiquidationsPageQuery,
+ envioSuppliersPageQuery,
+ envioSupplyWithdrawPageQuery,
+} from '@/graphql/envio-queries';
+import { convertSharesToAssets } from '@/utils/positions';
+import type {
+ Market,
+ MarketActivityTransaction,
+ MarketBorrower,
+ MarketLiquidationTransaction,
+ MarketSupplier,
+ PaginatedMarketActivityTransactions,
+ PaginatedMarketBorrowers,
+ PaginatedMarketLiquidations,
+ PaginatedMarketSuppliers,
+} from '@/utils/types';
+
+const ENVIO_SCAN_BATCH_SIZE = 1000;
+const PARTICIPANT_CACHE_TTL_MS = 2 * 60 * 1000;
+
+type CacheEntry<T> = {
+ data: T;
+ timestamp: number;
+};
+
+type BorrowSharePriceState = Pick<Market['state'], 'borrowAssets' | 'borrowShares'>;
+
+type EnvioSupplierRow = {
+ user: string;
+ supplyShares: string;
+};
+
+type EnvioBorrowerRow = {
+ user: string;
+ borrowShares: string;
+ collateral: string;
+};
+
+type CachedBorrowerPosition = {
+ userAddress: string;
+ borrowShares: string;
+ collateral: string;
+};
+
+type EnvioGraphqlResponse<T> = {
+ data?: T;
+};
+
+type EnvioSuppliersPageResponse = EnvioGraphqlResponse<{
+ Position: EnvioSupplierRow[];
+}>;
+
+type EnvioBorrowersPageResponse = EnvioGraphqlResponse<{
+ Position: EnvioBorrowerRow[];
+}>;
+
+type EnvioActivityEventRow = {
+ txHash: string;
+ timestamp: string;
+ assets: string;
+ onBehalf: string;
+};
+
+type EnvioSupplyWithdrawPageResponse = EnvioGraphqlResponse<{
+ supplies: EnvioActivityEventRow[];
+ withdraws: EnvioActivityEventRow[];
+}>;
+
+type EnvioBorrowRepayPageResponse = EnvioGraphqlResponse<{
+ borrows: EnvioActivityEventRow[];
+ repays: EnvioActivityEventRow[];
+}>;
+
+type EnvioLiquidationRow = {
+ txHash: string;
+ timestamp: string;
+ caller: string;
+ borrower: string;
+ repaidAssets: string;
+ seizedAssets: string;
+ badDebtAssets: string;
+};
+
+type EnvioLiquidationsPageResponse = EnvioGraphqlResponse<{
+ Morpho_Liquidate: EnvioLiquidationRow[];
+}>;
+
+const suppliersCache = new Map<string, CacheEntry<EnvioSupplierRow[]>>();
+const borrowersCache = new Map<string, CacheEntry<CachedBorrowerPosition[]>>();
+
+const toCacheKey = (parts: Array<string | number>): string => parts.join(':');
+
+const getCachedValue = <T>(cache: Map<string, CacheEntry<T>>, cacheKey: string, ttlMs: number): T | null => {
+ const cached = cache.get(cacheKey);
+ if (!cached) return null;
+ if (Date.now() - cached.timestamp >= ttlMs) {
+ cache.delete(cacheKey);
+ return null;
+ }
+ return cached.data;
+};
+
+const setCachedValue = <T>(cache: Map<string, CacheEntry<T>>, cacheKey: string, data: T): T => {
+ cache.set(cacheKey, {
+ data,
+ timestamp: Date.now(),
+ });
+ return data;
+};
+
+const paginateItems = <T>(items: T[], pageSize: number, skip: number) => {
+ return {
+ items: items.slice(skip, skip + pageSize),
+ totalCount: items.length,
+ };
+};
+
+const paginateWindowedItems = <T>(items: T[], pageSize: number, skip: number) => {
+ const sliceEnd = skip + pageSize;
+ const pageItems = items.slice(skip, sliceEnd);
+ const hasNextPage = items.length > sliceEnd;
+ const totalCount = skip >= items.length ? items.length : Math.max(items.length, skip + pageItems.length + Number(hasNextPage));
+
+ return {
+ items: pageItems,
+ totalCount,
+ hasNextPage,
+ };
+};
+
+const toTimestamp = (value: string): number => Number.parseInt(value, 10);
+
+const scanAllPages = async <T>({ fetchPage }: { fetchPage: (offset: number, limit: number) => Promise<T[]> }): Promise<T[]> => {
+ const items: T[] = [];
+ let offset = 0;
+
+ while (true) {
+ const pageItems = await fetchPage(offset, ENVIO_SCAN_BATCH_SIZE);
+ items.push(...pageItems);
+
+ if (pageItems.length < ENVIO_SCAN_BATCH_SIZE) {
+ return items;
+ }
+
+ offset += pageItems.length;
+ }
+};
+
+const getCachedOrLoad = async <T>({
+ cache,
+ cacheKey,
+ ttlMs,
+ loader,
+}: {
+ cache: Map<string, CacheEntry<T>>;
+ cacheKey: string;
+ ttlMs: number;
+ loader: () => Promise<T>;
+}): Promise<T> => {
+ const cached = getCachedValue(cache, cacheKey, ttlMs);
+ if (cached) return cached;
+ return setCachedValue(cache, cacheKey, await loader());
+};
+
+const sortActivityTransactions = (left: MarketActivityTransaction, right: MarketActivityTransaction): number => {
+ if (right.timestamp !== left.timestamp) {
+ return right.timestamp - left.timestamp;
+ }
+
+ return right.hash.localeCompare(left.hash);
+};
+
+const mapEnvioActivityRows = (rows: EnvioActivityEventRow[], type: MarketActivityTransaction['type']): MarketActivityTransaction[] => {
+ return rows.map((event) => ({
+ type,
+ hash: event.txHash,
+ timestamp: toTimestamp(event.timestamp),
+ amount: event.assets,
+ userAddress: event.onBehalf,
+ }));
+};
+
+const convertBorrowSharesToAssets = (borrowShares: string, marketState: BorrowSharePriceState): string => {
+ return convertSharesToAssets(BigInt(borrowShares), BigInt(marketState.borrowAssets), BigInt(marketState.borrowShares)).toString();
+};
+
+const mapCachedBorrowers = (positions: CachedBorrowerPosition[], marketState: BorrowSharePriceState): MarketBorrower[] => {
+ return positions.map((position) => ({
+ userAddress: position.userAddress,
+ borrowAssets: convertBorrowSharesToAssets(position.borrowShares, marketState),
+ collateral: position.collateral,
+ }));
+};
+
+const fetchEnvioSuppliersAll = async (marketId: string, chainId: number, minShares: string): Promise<EnvioSupplierRow[]> => {
+ const cacheKey = toCacheKey(['suppliers', chainId, marketId.toLowerCase(), minShares]);
+
+ return getCachedOrLoad({
+ cache: suppliersCache,
+ cacheKey,
+ ttlMs: PARTICIPANT_CACHE_TTL_MS,
+ loader: async () => {
+ return scanAllPages({
+ fetchPage: async (offset, limit) => {
+ const response = await envioGraphqlFetcher<EnvioSuppliersPageResponse>(envioSuppliersPageQuery, {
+ chainId,
+ marketId,
+ minShares,
+ limit,
+ offset,
+ });
+
+ return (response.data?.Position ?? []).map((position) => ({
+ userAddress: position.user,
+ supplyShares: position.supplyShares,
+ }));
+ },
+ });
+ },
+ });
+};
+
+const fetchEnvioBorrowersAll = async (marketId: string, chainId: number, minShares: string): Promise<CachedBorrowerPosition[]> => {
+ const cacheKey = toCacheKey(['borrowers', chainId, marketId.toLowerCase(), minShares]);
+
+ return getCachedOrLoad({
+ cache: borrowersCache,
+ cacheKey,
+ ttlMs: PARTICIPANT_CACHE_TTL_MS,
+ loader: async () => {
+ return scanAllPages({
+ fetchPage: async (offset, limit) => {
+ const response = await envioGraphqlFetcher<EnvioBorrowersPageResponse>(envioBorrowersPageQuery, {
+ chainId,
+ marketId,
+ minShares,
+ limit,
+ offset,
+ });
+
+ return (response.data?.Position ?? []).map((position) => ({
+ userAddress: position.user,
+ borrowShares: position.borrowShares,
+ collateral: position.collateral,
+ }));
+ },
+ });
+ },
+ });
+};
+
+const fetchEnvioSupplyWithdrawWindow = async (
+ marketId: string,
+ chainId: number,
+ minAssets: string,
+ limit: number,
+): Promise<MarketActivityTransaction[]> => {
+ const response = await envioGraphqlFetcher<EnvioSupplyWithdrawPageResponse>(envioSupplyWithdrawPageQuery, {
+ chainId,
+ marketId,
+ minAssets,
+ limit,
+ offset: 0,
+ });
+
+ const supplies = mapEnvioActivityRows(response.data?.supplies ?? [], 'MarketSupply');
+ const withdraws = mapEnvioActivityRows(response.data?.withdraws ?? [], 'MarketWithdraw');
+
+ return [...supplies, ...withdraws].sort(sortActivityTransactions);
+};
+
+const fetchEnvioBorrowRepayWindow = async (
+ marketId: string,
+ chainId: number,
+ minAssets: string,
+ limit: number,
+): Promise<MarketActivityTransaction[]> => {
+ const response = await envioGraphqlFetcher<EnvioBorrowRepayPageResponse>(envioBorrowRepayPageQuery, {
+ chainId,
+ marketId,
+ minAssets,
+ limit,
+ offset: 0,
+ });
+
+ const borrows = mapEnvioActivityRows(response.data?.borrows ?? [], 'MarketBorrow');
+ const repays = mapEnvioActivityRows(response.data?.repays ?? [], 'MarketRepay');
+
+ return [...borrows, ...repays].sort(sortActivityTransactions);
+};
+
+const fetchEnvioLiquidationsWindow = async (
+ marketId: string,
+ chainId: number,
+ offset: number,
+ limit: number,
+): Promise<MarketLiquidationTransaction[]> => {
+ const response = await envioGraphqlFetcher<EnvioLiquidationsPageResponse>(envioLiquidationsPageQuery, {
+ chainId,
+ marketId,
+ limit,
+ offset,
+ });
+
+ return (response.data?.Morpho_Liquidate ?? []).map((event) => ({
+ type: 'MarketLiquidation' as const,
+ hash: event.txHash,
+ timestamp: toTimestamp(event.timestamp),
+ liquidator: event.caller,
+ repaidAssets: event.repaidAssets,
+ seizedAssets: event.seizedAssets,
+ badDebtAssets: event.badDebtAssets,
+ }));
+};
+
+export const fetchEnvioMarketSuppliers = async (
+ marketId: string,
+ chainId: number,
+ minShares = '0',
+ first = 8,
+ skip = 0,
+): Promise<PaginatedMarketSuppliers> => {
+ return paginateItems(await fetchEnvioSuppliersAll(marketId, chainId, minShares), first, skip);
+};
+
+export const fetchEnvioMarketBorrowers = async (
+ marketId: string,
+ chainId: number,
+ marketState: BorrowSharePriceState,
+ minShares = '0',
+ first = 10,
+ skip = 0,
+): Promise<PaginatedMarketBorrowers> => {
+ return paginateItems(mapCachedBorrowers(await fetchEnvioBorrowersAll(marketId, chainId, minShares), marketState), first, skip);
+};
+
+export const fetchEnvioMarketSupplies = async (
+ marketId: string,
+ chainId: number,
+ minAssets = '0',
+ first = 8,
+ skip = 0,
+): Promise<PaginatedMarketActivityTransactions> => {
+ return paginateWindowedItems(await fetchEnvioSupplyWithdrawWindow(marketId, chainId, minAssets, skip + first + 1), first, skip);
+};
+
+export const fetchEnvioMarketBorrows = async (
+ marketId: string,
+ chainId: number,
+ minAssets = '0',
+ first = 8,
+ skip = 0,
+): Promise<PaginatedMarketActivityTransactions> => {
+ return paginateWindowedItems(await fetchEnvioBorrowRepayWindow(marketId, chainId, minAssets, skip + first + 1), first, skip);
+};
+
+export const fetchEnvioMarketLiquidations = async (
+ marketId: string,
+ chainId: number,
+ first = 8,
+ skip = 0,
+): Promise<PaginatedMarketLiquidations> => {
+ return paginateWindowedItems(await fetchEnvioLiquidationsWindow(marketId, chainId, skip, first + 1), first, 0);
+};
diff --git a/src/data-sources/morpho-api/market-liquidations.ts b/src/data-sources/morpho-api/market-liquidations.ts
index 1feef13d..7fe7cfd6 100644
--- a/src/data-sources/morpho-api/market-liquidations.ts
+++ b/src/data-sources/morpho-api/market-liquidations.ts
@@ -1,5 +1,5 @@
import { marketLiquidationsQuery } from '@/graphql/morpho-api-queries';
-import type { MarketLiquidationTransaction } from '@/utils/types'; // Import unified type
+import type { PaginatedMarketLiquidations } from '@/utils/types';
import { morphoGraphqlFetcher } from './fetchers';
// Type for the raw Morpho API response structure
@@ -19,6 +19,9 @@ type MorphoAPILiquidationsResponse = {
data?: {
transactions?: {
items?: MorphoAPILiquidationItem[];
+ pageInfo?: {
+ countTotal: number;
+ };
};
};
};
@@ -26,12 +29,13 @@ type MorphoAPILiquidationsResponse = {
/**
* Fetches market liquidation activities from the Morpho Blue API.
* @param marketId The unique key or ID of the market.
- * @returns A promise resolving to an array of unified MarketLiquidationTransaction objects.
+ * @returns A promise resolving to paginated unified MarketLiquidationTransaction objects.
*/
-export const fetchMorphoMarketLiquidations = async (marketId: string): Promise<MarketLiquidationTransaction[]> => {
+export const fetchMorphoMarketLiquidations = async (marketId: string, first = 8, skip = 0): Promise<PaginatedMarketLiquidations> => {
const variables = {
uniqueKey: marketId,
- // Morpho API query might not need first/skip for liquidations, adjust if needed
+ first,
+ skip,
};
try {
@@ -39,22 +43,25 @@ export const fetchMorphoMarketLiquidations = async (marketId: string): Promise
-  return items.map((item) => ({
- type: 'MarketLiquidation', // Standardize type
- hash: item.hash,
- timestamp: item.timestamp,
- liquidator: item.data.liquidator,
- repaidAssets: item.data.repaidAssets,
- seizedAssets: item.data.seizedAssets,
- badDebtAssets: item.data.badDebtAssets,
- // Removed optional fields not present in the simplified type
- }));
+ return {
+ items: items.map((item) => ({
+ type: 'MarketLiquidation', // Standardize type
+ hash: item.hash,
+ timestamp: item.timestamp,
+ liquidator: item.data.liquidator,
+ repaidAssets: item.data.repaidAssets,
+ seizedAssets: item.data.seizedAssets,
+ badDebtAssets: item.data.badDebtAssets,
+ })),
+ totalCount,
+ };
} catch (error) {
console.error(`Error fetching or processing Morpho API market liquidations for ${marketId}:`, error);
if (error instanceof Error) {
diff --git a/src/data-sources/subgraph/fetchers.ts b/src/data-sources/subgraph/fetchers.ts
index f48562be..ae01d002 100644
--- a/src/data-sources/subgraph/fetchers.ts
+++ b/src/data-sources/subgraph/fetchers.ts
@@ -1,3 +1,22 @@
+import type { SupportedNetworks } from '@/utils/networks';
+import { getSubgraphUrl } from '@/utils/subgraph-urls';
+
+const createSubgraphProviderError = (network: SupportedNetworks, message: string): Error => {
+ return Object.assign(new Error(message), {
+ source: 'subgraph' as const,
+ network,
+ });
+};
+
+export const requireSubgraphUrl = (network: SupportedNetworks): string => {
+ const subgraphUrl = getSubgraphUrl(network);
+ if (subgraphUrl) {
+ return subgraphUrl;
+ }
+
+ throw createSubgraphProviderError(network, `No Subgraph URL configured for network ${network}`);
+};
+
export const subgraphGraphqlFetcher = async (
apiUrl: string, // Subgraph URL can vary
query: string,
diff --git a/src/data-sources/subgraph/market-borrowers.ts b/src/data-sources/subgraph/market-borrowers.ts
index f03be539..30ea7f5e 100644
--- a/src/data-sources/subgraph/market-borrowers.ts
+++ b/src/data-sources/subgraph/market-borrowers.ts
@@ -1,8 +1,8 @@
import { marketBorrowersQuery } from '@/graphql/morpho-subgraph-queries';
import type { SupportedNetworks } from '@/utils/networks';
-import { getSubgraphUrl } from '@/utils/subgraph-urls';
+import { convertSharesToAssets } from '@/utils/positions';
import type { MarketBorrower, PaginatedMarketBorrowers } from '@/utils/types';
-import { subgraphGraphqlFetcher } from './fetchers';
+import { requireSubgraphUrl, subgraphGraphqlFetcher } from './fetchers';
// Type for the Subgraph response
type SubgraphBorrowerItem = {
@@ -58,11 +58,7 @@ export const fetchSubgraphMarketBorrowers = async (
pageSize = 10,
skip = 0,
): Promise => {
- const subgraphUrl = getSubgraphUrl(network);
- if (!subgraphUrl) {
- console.warn(`No Subgraph URL configured for network: ${network}. Returning empty results.`);
- return { items: [], totalCount: 0 };
- }
+ const subgraphUrl = requireSubgraphUrl(network);
const cacheKey = getCacheKey(marketId, network, minShares);
const now = Date.now();
@@ -72,9 +68,7 @@ export const fetchSubgraphMarketBorrowers = async (
let allMappedItems: MarketBorrower[];
if (cached && now - cached.timestamp < CACHE_TTL) {
- // Use cached data
allMappedItems = cached.data;
- console.log(`Using cached borrowers data for ${marketId} (${allMappedItems.length} items)`);
} else {
// Fetch fresh data - always fetch top 1000 items (subgraph limit)
const variables = {
@@ -86,6 +80,12 @@ export const fetchSubgraphMarketBorrowers = async (
try {
const result = await subgraphGraphqlFetcher(subgraphUrl, marketBorrowersQuery, variables);
+ if (!result.data) {
+ throw Object.assign(new Error(`Subgraph returned no borrower data for market ${marketId} on network ${network}`), {
+ source: 'subgraph' as const,
+ network,
+ });
+ }
const positions = result.data?.positions ?? [];
const market = result.data?.market;
@@ -99,12 +99,7 @@ export const fetchSubgraphMarketBorrowers = async (
// Convert borrow shares to borrow assets
// borrowAssets = (shares * totalBorrow) / totalBorrowShares
const shares = BigInt(position.shares);
- let borrowAssets = '0';
-
- if (totalBorrowShares > 0n) {
- const assets = (shares * totalBorrow) / totalBorrowShares;
- borrowAssets = assets.toString();
- }
+ const borrowAssets = convertSharesToAssets(shares, totalBorrow, totalBorrowShares).toString();
// Get collateral balance from nested positions (should be exactly 1)
const collateralBalance = position.account.positions[0]?.balance ?? '0';
@@ -121,8 +116,6 @@ export const fetchSubgraphMarketBorrowers = async (
data: allMappedItems,
timestamp: now,
});
-
- console.log(`Fetched and cached ${allMappedItems.length} borrowers for ${marketId}`);
} catch (error) {
console.error(`Error fetching or processing Subgraph market borrowers for ${marketId}:`, error);
if (error instanceof Error) {
diff --git a/src/data-sources/subgraph/market-borrows.ts b/src/data-sources/subgraph/market-borrows.ts
index 2e5d7a7a..c94901d7 100644
--- a/src/data-sources/subgraph/market-borrows.ts
+++ b/src/data-sources/subgraph/market-borrows.ts
@@ -1,8 +1,7 @@
import { marketBorrowsRepaysQuery } from '@/graphql/morpho-subgraph-queries';
import type { SupportedNetworks } from '@/utils/networks';
-import { getSubgraphUrl } from '@/utils/subgraph-urls';
import type { MarketActivityTransaction, PaginatedMarketActivityTransactions } from '@/utils/types'; // Import shared type
-import { subgraphGraphqlFetcher } from './fetchers';
+import { requireSubgraphUrl, subgraphGraphqlFetcher } from './fetchers';
// Types specific to the Subgraph response for this query
type SubgraphBorrowRepayItem = {
@@ -23,10 +22,9 @@ type SubgraphBorrowsRepaysResponse = {
/**
* Fetches market borrow/repay activities from the Subgraph.
- * NOTE: Because borrows and repays are fetched separately and merged client-side,
- * we cannot do proper server-side pagination. Instead, we fetch a large batch (200 items)
- * from both sources, merge and sort them, then apply client-side pagination.
- * This ensures correct ordering and prevents skipped items.
+ * Because borrows and repays are fetched separately and merged client-side,
+ * we fetch the requested prefix window from both streams, merge them, and then
+ * derive the requested page plus `hasNextPage`.
* @param marketId The ID of the market.
* @param loanAssetId The address of the loan asset.
* @param network The blockchain network.
@@ -43,13 +41,9 @@ export const fetchSubgraphMarketBorrows = async (
first = 8,
skip = 0,
): Promise => {
- const subgraphUrl = getSubgraphUrl(network);
- if (!subgraphUrl) {
- console.warn(`No Subgraph URL configured for network: ${network}. Returning empty results.`);
- return { items: [], totalCount: 0 };
- }
+ const subgraphUrl = requireSubgraphUrl(network);
- const fetchBatchSize = 200;
+ const fetchBatchSize = skip + first + 1;
const variables = {
marketId,
@@ -61,6 +55,12 @@ export const fetchSubgraphMarketBorrows = async (
try {
const result = await subgraphGraphqlFetcher(subgraphUrl, marketBorrowsRepaysQuery, variables);
+ if (!result.data) {
+ throw Object.assign(new Error(`Subgraph returned no borrow activity data for market ${marketId} on network ${network}`), {
+ source: 'subgraph' as const,
+ network,
+ });
+ }
const borrows = result.data?.borrows ?? [];
const repays = result.data?.repays ?? [];
@@ -88,11 +88,13 @@ export const fetchSubgraphMarketBorrows = async (
const startIndex = skip;
const endIndex = skip + first;
const items = combined.slice(startIndex, endIndex);
- const totalCount = combined.length;
+ const hasNextPage = combined.length > endIndex;
+ const totalCount = skip >= combined.length ? combined.length : Math.max(combined.length, skip + items.length + Number(hasNextPage));
return {
items,
totalCount,
+ hasNextPage,
};
} catch (error) {
console.error(`Error fetching or processing Subgraph market borrows for ${marketId}:`, error);
diff --git a/src/data-sources/subgraph/market-liquidations.ts b/src/data-sources/subgraph/market-liquidations.ts
index 3880d328..7e7373f7 100644
--- a/src/data-sources/subgraph/market-liquidations.ts
+++ b/src/data-sources/subgraph/market-liquidations.ts
@@ -1,8 +1,7 @@
-import { marketLiquidationsAndBadDebtQuery } from '@/graphql/morpho-subgraph-queries';
+import { marketLiquidationBadDebtQuery, marketLiquidationsPageQuery } from '@/graphql/morpho-subgraph-queries';
import type { SupportedNetworks } from '@/utils/networks';
-import { getSubgraphUrl } from '@/utils/subgraph-urls';
-import type { MarketLiquidationTransaction } from '@/utils/types'; // Import simplified type
-import { subgraphGraphqlFetcher } from './fetchers';
+import type { MarketLiquidationTransaction, PaginatedMarketLiquidations } from '@/utils/types';
+import { requireSubgraphUrl, subgraphGraphqlFetcher } from './fetchers';
// Types specific to the Subgraph response items
type SubgraphLiquidateItem = {
@@ -27,53 +26,87 @@ type SubgraphBadDebtItem = {
type SubgraphLiquidationsResponse = {
data?: {
liquidates?: SubgraphLiquidateItem[];
+ };
+};
+
+type SubgraphLiquidationBadDebtResponse = {
+ data?: {
badDebtRealizations?: SubgraphBadDebtItem[];
};
};
/**
* Fetches market liquidation activities from the Subgraph.
- * Combines liquidation events with associated bad debt realizations.
+ * Fetches only the requested liquidation page and then resolves bad debt for those page rows.
* @param marketId The ID of the market.
* @param network The blockchain network.
- * @returns A promise resolving to an array of simplified MarketLiquidationTransaction objects.
+ * @returns A promise resolving to paginated MarketLiquidationTransaction objects.
*/
export const fetchSubgraphMarketLiquidations = async (
marketId: string,
network: SupportedNetworks,
-): Promise => {
- const subgraphUrl = getSubgraphUrl(network);
- if (!subgraphUrl) {
- console.warn(`No Subgraph URL configured for network: ${network}. Returning empty results.`);
- return [];
- }
+ first = 8,
+ skip = 0,
+): Promise => {
+ const subgraphUrl = requireSubgraphUrl(network);
- const variables = { marketId };
+ const variables = {
+ marketId,
+ first: first + 1,
+ skip,
+ };
try {
- const result = await subgraphGraphqlFetcher(subgraphUrl, marketLiquidationsAndBadDebtQuery, variables);
+ const result = await subgraphGraphqlFetcher(subgraphUrl, marketLiquidationsPageQuery, variables);
+ if (!result.data) {
+ throw Object.assign(new Error(`Subgraph returned no liquidation data for market ${marketId} on network ${network}`), {
+ source: 'subgraph' as const,
+ network,
+ });
+ }
const liquidates = result.data?.liquidates ?? [];
- const badDebtItems = result.data?.badDebtRealizations ?? [];
+ const hasNextPage = liquidates.length > first;
+ const pageLiquidations = liquidates.slice(0, first);
+ const liquidationIds = pageLiquidations.map((liquidation) => liquidation.id);
+ const badDebtResult =
+ liquidationIds.length === 0
+ ? null
+ : await subgraphGraphqlFetcher(subgraphUrl, marketLiquidationBadDebtQuery, {
+ liquidationIds,
+ });
+
+ if (badDebtResult && !badDebtResult.data) {
+ throw Object.assign(new Error(`Subgraph returned no bad debt data for market ${marketId} on network ${network}`), {
+ source: 'subgraph' as const,
+ network,
+ });
+ }
+
+ const badDebtItems = badDebtResult?.data?.badDebtRealizations ?? [];
// Create a map for quick lookup of bad debt by liquidation ID
const badDebtMap = new Map();
- badDebtItems.forEach((item) => {
+ for (const item of badDebtItems) {
badDebtMap.set(item.liquidation.id, item.badDebt);
- });
+ }
// Map liquidations, adding bad debt information
- return liquidates.map((liq) => ({
+ const items: MarketLiquidationTransaction[] = pageLiquidations.map((liq) => ({
type: 'MarketLiquidation',
hash: liq.hash,
timestamp: typeof liq.timestamp === 'string' ? Number.parseInt(liq.timestamp, 10) : liq.timestamp,
- // Subgraph query doesn't provide liquidator, use empty string or default
liquidator: liq.liquidator.id,
- repaidAssets: liq.repaid, // Loan asset repaid
- seizedAssets: liq.amount, // Collateral seized
- // Fetch bad debt from the map using the liquidate event ID
- badDebtAssets: badDebtMap.get(liq.id) ?? '0', // Default to '0' if no bad debt entry
+ repaidAssets: liq.repaid,
+ seizedAssets: liq.amount,
+ badDebtAssets: badDebtMap.get(liq.id) ?? '0',
}));
+
+ return {
+ items,
+ totalCount: skip + items.length + Number(hasNextPage),
+ hasNextPage,
+ };
} catch (error) {
console.error(`Error fetching or processing Subgraph market liquidations for ${marketId}:`, error);
if (error instanceof Error) {
diff --git a/src/data-sources/subgraph/market-suppliers.ts b/src/data-sources/subgraph/market-suppliers.ts
index 9558e13a..1f11e575 100644
--- a/src/data-sources/subgraph/market-suppliers.ts
+++ b/src/data-sources/subgraph/market-suppliers.ts
@@ -1,8 +1,7 @@
import { marketSuppliersQuery } from '@/graphql/morpho-subgraph-queries';
import type { SupportedNetworks } from '@/utils/networks';
-import { getSubgraphUrl } from '@/utils/subgraph-urls';
import type { MarketSupplier, PaginatedMarketSuppliers } from '@/utils/types';
-import { subgraphGraphqlFetcher } from './fetchers';
+import { requireSubgraphUrl, subgraphGraphqlFetcher } from './fetchers';
// Type for the Subgraph response
type SubgraphSupplierItem = {
@@ -50,11 +49,7 @@ export const fetchSubgraphMarketSuppliers = async (
pageSize = 8,
skip = 0,
): Promise => {
- const subgraphUrl = getSubgraphUrl(network);
- if (!subgraphUrl) {
- console.warn(`No Subgraph URL configured for network: ${network}. Returning empty results.`);
- return { items: [], totalCount: 0 };
- }
+ const subgraphUrl = requireSubgraphUrl(network);
const cacheKey = getCacheKey(marketId, network, minShares);
const now = Date.now();
@@ -64,9 +59,7 @@ export const fetchSubgraphMarketSuppliers = async (
let allMappedItems: MarketSupplier[];
if (cached && now - cached.timestamp < CACHE_TTL) {
- // Use cached data
allMappedItems = cached.data;
- console.log(`Using cached suppliers data for ${marketId} (${allMappedItems.length} items)`);
} else {
// Fetch fresh data - always fetch top 1000 items (subgraph limit)
const variables = {
@@ -78,6 +71,12 @@ export const fetchSubgraphMarketSuppliers = async (
try {
const result = await subgraphGraphqlFetcher(subgraphUrl, marketSuppliersQuery, variables);
+ if (!result.data) {
+ throw Object.assign(new Error(`Subgraph returned no supplier data for market ${marketId} on network ${network}`), {
+ source: 'subgraph' as const,
+ network,
+ });
+ }
const positions = result.data?.positions ?? [];
@@ -92,8 +91,6 @@ export const fetchSubgraphMarketSuppliers = async (
data: allMappedItems,
timestamp: now,
});
-
- console.log(`Fetched and cached ${allMappedItems.length} suppliers for ${marketId}`);
} catch (error) {
console.error(`Error fetching or processing Subgraph market suppliers for ${marketId}:`, error);
if (error instanceof Error) {
diff --git a/src/data-sources/subgraph/market-supplies.ts b/src/data-sources/subgraph/market-supplies.ts
index edb49b1a..f099be2b 100644
--- a/src/data-sources/subgraph/market-supplies.ts
+++ b/src/data-sources/subgraph/market-supplies.ts
@@ -1,8 +1,7 @@
import { marketDepositsWithdrawsQuery } from '@/graphql/morpho-subgraph-queries';
import type { SupportedNetworks } from '@/utils/networks';
-import { getSubgraphUrl } from '@/utils/subgraph-urls'; // Import shared utility
import type { MarketActivityTransaction, PaginatedMarketActivityTransactions } from '@/utils/types';
-import { subgraphGraphqlFetcher } from './fetchers'; // Import shared fetcher
+import { requireSubgraphUrl, subgraphGraphqlFetcher } from './fetchers';
// Types specific to the Subgraph response for this query
type SubgraphSupplyWithdrawItem = {
@@ -24,10 +23,9 @@ type SubgraphSuppliesWithdrawsResponse = {
/**
* Fetches market supply/withdraw activities (deposits/withdraws of loan asset) from the Subgraph.
- * NOTE: Because deposits and withdraws are fetched separately and merged client-side,
- * we cannot do proper server-side pagination. Instead, we fetch a large batch (200 items)
- * from both sources, merge and sort them, then apply client-side pagination.
- * This ensures correct ordering and prevents skipped items.
+ * Because deposits and withdraws are fetched separately and merged client-side,
+ * we fetch the requested prefix window from both streams, merge them, and then
+ * derive the requested page plus `hasNextPage`.
* @param marketId The ID of the market.
* @param loanAssetId The address of the loan asset.
* @param network The blockchain network.
@@ -44,13 +42,9 @@ export const fetchSubgraphMarketSupplies = async (
first = 8,
skip = 0,
): Promise => {
- const subgraphUrl = getSubgraphUrl(network);
- if (!subgraphUrl) {
- console.warn(`No Subgraph URL configured for network: ${network}. Returning empty results.`);
- return { items: [], totalCount: 0 };
- }
+ const subgraphUrl = requireSubgraphUrl(network);
- const fetchBatchSize = 200;
+ const fetchBatchSize = skip + first + 1;
const variables = {
marketId,
@@ -62,6 +56,12 @@ export const fetchSubgraphMarketSupplies = async (
try {
const result = await subgraphGraphqlFetcher(subgraphUrl, marketDepositsWithdrawsQuery, variables);
+ if (!result.data) {
+ throw Object.assign(new Error(`Subgraph returned no supply activity data for market ${marketId} on network ${network}`), {
+ source: 'subgraph' as const,
+ network,
+ });
+ }
const deposits = result.data?.deposits ?? [];
const withdraws = result.data?.withdraws ?? [];
@@ -89,11 +89,13 @@ export const fetchSubgraphMarketSupplies = async (
const startIndex = skip;
const endIndex = skip + first;
const items = combined.slice(startIndex, endIndex);
- const totalCount = combined.length;
+ const hasNextPage = combined.length > endIndex;
+ const totalCount = skip >= combined.length ? combined.length : Math.max(combined.length, skip + items.length + Number(hasNextPage));
return {
items,
totalCount,
+ hasNextPage,
};
} catch (error) {
console.error(`Error fetching or processing Subgraph market supplies for ${marketId}:`, error);
diff --git a/src/features/admin-v2/components/stats-asset-table.tsx b/src/features/admin-v2/components/stats-asset-table.tsx
index 17380586..1700bc27 100644
--- a/src/features/admin-v2/components/stats-asset-table.tsx
+++ b/src/features/admin-v2/components/stats-asset-table.tsx
@@ -257,6 +257,7 @@ export function StatsAssetTable({ transactions, isLoading }: StatsAssetTableProp