Commit eaa9b519 authored by Zach Pomerantz, committed by GitHub

feat: nft polling (#5083)

* feat: nft polling

* docs: document the poll

* chore: add todo for cursor tracking

* fix: poll all pages

* 5s polling
Co-authored-by: Charlie <charles@bachmeier.io>
parent 6f68980d
 import ms from 'ms.macro'
-import { Variables } from 'react-relay'
-import { Environment, Network, RecordSource, RequestParameters, Store } from 'relay-runtime'
-import RelayQueryResponseCache from 'relay-runtime/lib/network/RelayQueryResponseCache'
+import { Environment, Network, RecordSource, Store } from 'relay-runtime'
 
 import fetchGraphQL from './fetchGraphQL'
 
-// max number of requests in cache; least-recently updated entries are purged first
-const size = 250
-// how long records stay valid in the cache, in milliseconds
-const ttl = ms`5m`
-
-export const cache = new RelayQueryResponseCache({ size, ttl })
-
-const fetchQuery = async function wrappedFetchQuery(params: RequestParameters, variables: Variables) {
-  const queryID = params.name
-  const cachedData = cache.get(queryID, variables)
-  if (cachedData !== null) return cachedData
-  return fetchGraphQL(params, variables).then((data) => {
-    if (params.operationKind !== 'mutation') {
-      cache.set(queryID, variables, data)
-    }
-    return data
-  })
-}
-
-// This property tells Relay not to immediately clear its cache when the user
-// navigates around the app. Relay will hold onto the specified number of
-// query results, allowing the user to return to recently visited pages
-// and reuse cached data if it is available/fresh.
-const gcReleaseBufferSize = 10
-const queryCacheExpirationTime = ms`1m`
-const store = new Store(new RecordSource(), { gcReleaseBufferSize, queryCacheExpirationTime })
-const network = Network.create(fetchQuery)
-
-// Export a singleton instance of Relay Environment configured with our network function:
-export default new Environment({
-  network,
-  store,
-})
+// This makes it possible (and more likely) to reuse data when navigating back to a page,
+// tab, or piece of content that has been visited before. These settings together configure
+// the cache to serve the last 250 records, so long as they are less than 5 minutes old:
+const gcReleaseBufferSize = 250
+const queryCacheExpirationTime = ms`5m`
+
+export const CachingEnvironment = new Environment({
+  network: Network.create(fetchGraphQL),
+  store: new Store(new RecordSource(), { gcReleaseBufferSize, queryCacheExpirationTime }),
+})
+export default CachingEnvironment
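Net effect of this first file: the hand-rolled RelayQueryResponseCache that wrapped the network layer is gone, and caching now lives in the Relay store itself, with retention raised from 10 records for 1 minute to 250 records for 5 minutes so polled pages can re-render from the store. A minimal sketch of how the exported singleton is typically consumed — the GraphQLProvider wrapper below is an illustrative assumption, not part of this commit:

import React from 'react'
import { RelayEnvironmentProvider } from 'react-relay'

import CachingEnvironment from './RelayEnvironment'

// Hypothetical wiring (not part of this commit): descendants can then read the
// environment via useRelayEnvironment(), as the asset hook in the next file does.
export default function GraphQLProvider({ children }: { children: React.ReactNode }) {
  return <RelayEnvironmentProvider environment={CachingEnvironment}>{children}</RelayEnvironmentProvider>
}

The second file wires polling into the NFT assets query hook: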
 import graphql from 'babel-plugin-relay/macro'
 import { parseEther } from 'ethers/lib/utils'
+import useInterval from 'lib/hooks/useInterval'
+import ms from 'ms.macro'
 import { GenieAsset, Rarity, SellOrder } from 'nft/types'
-import { useLazyLoadQuery, usePaginationFragment } from 'react-relay'
+import { useCallback, useMemo, useState } from 'react'
+import { fetchQuery, useLazyLoadQuery, usePaginationFragment, useRelayEnvironment } from 'react-relay'
 
 import { AssetPaginationQuery } from './__generated__/AssetPaginationQuery.graphql'
 import { AssetQuery, NftAssetsFilterInput, NftAssetSortableField } from './__generated__/AssetQuery.graphql'
@@ -117,74 +120,94 @@ export function useAssetsQuery(
   last?: number,
   before?: string
 ) {
-  const queryData = useLazyLoadQuery<AssetQuery>(assetQuery, {
-    address,
-    orderBy,
-    asc,
-    filter,
-    first,
-    after,
-    last,
-    before,
-  })
+  const vars = useMemo(
+    () => ({ address, orderBy, asc, filter, first, after, last, before }),
+    [address, after, asc, before, filter, first, last, orderBy]
+  )
+  const [queryOptions, setQueryOptions] = useState({ fetchKey: 0 })
+  const queryData = useLazyLoadQuery<AssetQuery>(assetQuery, vars, queryOptions)
   const { data, hasNext, loadNext, isLoadingNext } = usePaginationFragment<AssetPaginationQuery, any>(
     assetPaginationQuery,
     queryData
   )
-  const assets: GenieAsset[] = data.nftAssets?.edges?.map((queryAsset: { node: any }) => {
-    const asset = queryAsset.node
-    const ethPrice = parseEther(
-      asset.listings?.edges[0]?.node.price.value?.toLocaleString('fullwide', { useGrouping: false }) ?? '0'
-    ).toString()
-    return {
-      id: asset.id,
-      address: asset.collection.nftContracts[0]?.address,
-      notForSale: asset.listings?.edges.length === 0,
-      collectionName: asset.collection?.name,
-      collectionSymbol: asset.collection?.image?.url,
-      imageUrl: asset.image?.url,
-      animationUrl: asset.animationUrl,
-      marketplace: asset.listings?.edges[0]?.node.marketplace.toLowerCase(),
-      name: asset.name,
-      priceInfo: asset.listings
-        ? {
-            ETHPrice: ethPrice,
-            baseAsset: 'ETH',
-            baseDecimals: '18',
-            basePrice: ethPrice,
-          }
-        : undefined,
-      susFlag: asset.suspiciousFlag,
-      sellorders: asset.listings?.edges.map((listingNode: { node: SellOrder }) => {
-        return {
-          ...listingNode.node,
-          protocolParameters: listingNode.node.protocolParameters
-            ? JSON.parse(listingNode.node.protocolParameters.toString())
-            : undefined,
-        }
-      }),
-      smallImageUrl: asset.smallImage?.url,
-      tokenId: asset.tokenId,
-      tokenType: asset.collection.nftContracts[0]?.standard,
-      // totalCount?: number, // TODO waiting for BE changes
-      collectionIsVerified: asset.collection?.isVerified,
-      rarity: {
-        primaryProvider: 'Rarity Sniper', // TODO update when backend adds more providers
-        providers: asset.rarities.map((rarity: Rarity) => {
-          return {
-            ...rarity,
-            provider: 'Rarity Sniper',
-          }
-        }),
-      },
-      owner: asset.ownerAddress,
-      creator: {
-        profile_img_url: asset.collection?.creator?.profileImage?.url,
-        address: asset.collection?.creator?.address,
-      },
-      metadataUrl: asset.metadataUrl,
-    }
-  })
+  // Poll for updates.
+  const POLLING_INTERVAL = ms`5s`
+  const environment = useRelayEnvironment()
+  const refresh = useCallback(async () => {
+    const length = data.nftAssets?.edges?.length
+    // Initiate a network request. When it resolves, refresh the UI from the store (to avoid re-triggering Suspense);
+    // see: https://relay.dev/docs/guided-tour/refetching/refreshing-queries/#if-you-need-to-avoid-suspense-1.
+    await fetchQuery<AssetQuery>(environment, assetQuery, { ...vars, first: length }).toPromise()
+    setQueryOptions(({ fetchKey }) => ({
+      fetchKey: fetchKey + 1,
+      fetchPolicy: 'store-only',
+    }))
+  }, [data.nftAssets?.edges?.length, environment, vars])
+  // NB: This will poll every POLLING_INTERVAL, *not* every POLLING_INTERVAL from the last successful poll.
+  // TODO(WEB-2004): Update useInterval to wait for the fn to complete before rescheduling.
+  useInterval(refresh, POLLING_INTERVAL)
+  // It is especially important for this to be memoized to avoid re-rendering from polling if data is unchanged.
+  const assets: GenieAsset[] = useMemo(
+    () =>
+      data.nftAssets?.edges?.map((queryAsset: { node: any }) => {
+        const asset = queryAsset.node
+        const ethPrice = parseEther(
+          asset.listings?.edges[0]?.node.price.value?.toLocaleString('fullwide', { useGrouping: false }) ?? '0'
+        ).toString()
+        return {
+          id: asset.id,
+          address: asset.collection.nftContracts[0]?.address,
+          notForSale: asset.listings?.edges.length === 0,
+          collectionName: asset.collection?.name,
+          collectionSymbol: asset.collection?.image?.url,
+          imageUrl: asset.image?.url,
+          animationUrl: asset.animationUrl,
+          marketplace: asset.listings?.edges[0]?.node.marketplace.toLowerCase(),
+          name: asset.name,
+          priceInfo: asset.listings
+            ? {
+                ETHPrice: ethPrice,
+                baseAsset: 'ETH',
+                baseDecimals: '18',
+                basePrice: ethPrice,
+              }
+            : undefined,
+          susFlag: asset.suspiciousFlag,
+          sellorders: asset.listings?.edges.map((listingNode: { node: SellOrder }) => {
+            return {
+              ...listingNode.node,
+              protocolParameters: listingNode.node.protocolParameters
+                ? JSON.parse(listingNode.node.protocolParameters.toString())
+                : undefined,
+            }
+          }),
+          smallImageUrl: asset.smallImage?.url,
+          tokenId: asset.tokenId,
+          tokenType: asset.collection.nftContracts[0]?.standard,
+          // totalCount?: number, // TODO waiting for BE changes
+          collectionIsVerified: asset.collection?.isVerified,
+          rarity: {
+            primaryProvider: 'Rarity Sniper', // TODO update when backend adds more providers
+            providers: asset.rarities.map((rarity: Rarity) => {
+              return {
+                ...rarity,
+                provider: 'Rarity Sniper',
+              }
+            }),
+          },
+          owner: asset.ownerAddress,
+          creator: {
+            profile_img_url: asset.collection?.creator?.profileImage?.url,
+            address: asset.collection?.creator?.address,
+          },
+          metadataUrl: asset.metadataUrl,
+        }
+      }),
+    [data.nftAssets?.edges]
+  )
   return { assets, hasNext, isLoadingNext, loadNext }
 }
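The refresh callback above is the crux of the polling feature: it fetches the currently loaded window over the network into the Relay store, then bumps fetchKey under a 'store-only' fetchPolicy so useLazyLoadQuery re-reads the fresh records without re-triggering Suspense (per the linked Relay docs). A hedged sketch of the same pattern factored into a reusable hook — useStoreRefresh is a hypothetical name, not something this commit adds:

import { useCallback, useState } from 'react'
import { fetchQuery, useRelayEnvironment } from 'react-relay'
import { GraphQLTaggedNode, OperationType } from 'relay-runtime'

// Hypothetical generalization of the commit's refresh logic: fetch over the
// network into the Relay store, then bump fetchKey under a 'store-only' policy
// so useLazyLoadQuery re-reads fresh data without re-suspending.
export function useStoreRefresh<T extends OperationType>(query: GraphQLTaggedNode, variables: T['variables']) {
  const environment = useRelayEnvironment()
  const [queryOptions, setQueryOptions] = useState<{ fetchKey: number; fetchPolicy?: 'store-only' }>({
    fetchKey: 0,
  })
  const refresh = useCallback(async () => {
    await fetchQuery<T>(environment, query, variables).toPromise()
    setQueryOptions(({ fetchKey }) => ({ fetchKey: fetchKey + 1, fetchPolicy: 'store-only' }))
  }, [environment, query, variables])
  // Pass queryOptions as useLazyLoadQuery's third argument, as the hook above does.
  return { queryOptions, refresh }
}

Note that the commit's refresh also overrides first with the number of edges already loaded ({ ...vars, first: length }), which is what the "fix: poll all pages" bullet refers to: each poll re-fetches every page the user has paginated through, not just the first.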
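Separately, the NB/TODO comments flag that useInterval fires on a fixed cadence whether or not the previous refresh has settled, so a slow response can overlap the next poll. One possible shape for the awaited variant TODO(WEB-2004) asks for — a sketch, not the repo's actual lib/hooks/useInterval:

import { useEffect, useRef } from 'react'

// Hypothetical awaited interval: waits for the async callback to settle before
// scheduling the next tick, so polls can never overlap or stack up.
export function useAwaitedInterval(callback: () => Promise<void>, delay: number) {
  const savedCallback = useRef(callback)
  useEffect(() => {
    savedCallback.current = callback
  }, [callback])

  useEffect(() => {
    let cancelled = false
    let timeout: ReturnType<typeof setTimeout>
    const tick = async () => {
      try {
        await savedCallback.current()
      } finally {
        // Reschedule only after completion, and only while still mounted.
        if (!cancelled) timeout = setTimeout(tick, delay)
      }
    }
    timeout = setTimeout(tick, delay)
    return () => {
      cancelled = true
      clearTimeout(timeout)
    }
  }, [delay])
}

The call site would keep the same shape (useAwaitedInterval(refresh, POLLING_INTERVAL)); a refresh slower than 5s then delays the next poll instead of stacking requests.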