Commit eaa9b519 authored by Zach Pomerantz, committed by GitHub

feat: nft polling (#5083)

* feat: nft polling

* docs: document the poll

* chore: add todo for cursor tracking

* fix: poll all pages

* 5s polling
Co-authored-by: Charlie <charles@bachmeier.io>
parent 6f68980d
 import ms from 'ms.macro'
-import { Variables } from 'react-relay'
-import { Environment, Network, RecordSource, RequestParameters, Store } from 'relay-runtime'
-import RelayQueryResponseCache from 'relay-runtime/lib/network/RelayQueryResponseCache'
+import { Environment, Network, RecordSource, Store } from 'relay-runtime'
 
 import fetchGraphQL from './fetchGraphQL'
 
-// max number of requests in cache, least-recently updated entries purged first
-const size = 250
-// number in milliseconds, how long records stay valid in cache
-const ttl = ms`5m`
-export const cache = new RelayQueryResponseCache({ size, ttl })
-
-const fetchQuery = async function wrappedFetchQuery(params: RequestParameters, variables: Variables) {
-  const queryID = params.name
-  const cachedData = cache.get(queryID, variables)
-
-  if (cachedData !== null) return cachedData
-
-  return fetchGraphQL(params, variables).then((data) => {
-    if (params.operationKind !== 'mutation') {
-      cache.set(queryID, variables, data)
-    }
-    return data
-  })
-}
-
-// This property tells Relay to not immediately clear its cache when the user
-// navigates around the app. Relay will hold onto the specified number of
-// query results, allowing the user to return to recently visited pages
-// and reuse cached data if it is available/fresh.
-const gcReleaseBufferSize = 10
-const queryCacheExpirationTime = ms`1m`
-const store = new Store(new RecordSource(), { gcReleaseBufferSize, queryCacheExpirationTime })
-const network = Network.create(fetchQuery)
-
-// Export a singleton instance of Relay Environment configured with our network function:
-export default new Environment({
-  network,
-  store,
-})
+// This makes it possible (and more likely) to be able to reuse data when navigating back to a page,
+// tab or piece of content that has been visited before. These settings together configure the cache
+// to serve the last 250 records, so long as they are less than 5 minutes old:
+const gcReleaseBufferSize = 250
+const queryCacheExpirationTime = ms`5m`
+
+export const CachingEnvironment = new Environment({
+  network: Network.create(fetchGraphQL),
+  store: new Store(new RecordSource(), { gcReleaseBufferSize, queryCacheExpirationTime }),
+})
+export default CachingEnvironment
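For orientation, and not part of this diff: the default export is still the environment handed to react-relay's RelayEnvironmentProvider, so hooks such as useAssetsQuery below resolve against this caching store. A minimal sketch of that wiring, assuming the file above is RelayEnvironment.ts and using a hypothetical GraphQLProvider wrapper:

import { ReactNode } from 'react'
import { RelayEnvironmentProvider } from 'react-relay'

import RelayEnvironment from './RelayEnvironment' // default export is CachingEnvironment

// Hypothetical wrapper: any hook rendered below this provider (e.g. useAssetsQuery) that calls
// useLazyLoadQuery or useRelayEnvironment reads from and writes to CachingEnvironment's store.
export function GraphQLProvider({ children }: { children: ReactNode }) {
  return <RelayEnvironmentProvider environment={RelayEnvironment}>{children}</RelayEnvironmentProvider>
}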
 import graphql from 'babel-plugin-relay/macro'
 import { parseEther } from 'ethers/lib/utils'
+import useInterval from 'lib/hooks/useInterval'
+import ms from 'ms.macro'
 import { GenieAsset, Rarity, SellOrder } from 'nft/types'
-import { useLazyLoadQuery, usePaginationFragment } from 'react-relay'
+import { useCallback, useMemo, useState } from 'react'
+import { fetchQuery, useLazyLoadQuery, usePaginationFragment, useRelayEnvironment } from 'react-relay'
 
 import { AssetPaginationQuery } from './__generated__/AssetPaginationQuery.graphql'
 import { AssetQuery, NftAssetsFilterInput, NftAssetSortableField } from './__generated__/AssetQuery.graphql'
@@ -117,74 +120,94 @@ export function useAssetsQuery(
   last?: number,
   before?: string
 ) {
-  const queryData = useLazyLoadQuery<AssetQuery>(assetQuery, {
-    address,
-    orderBy,
-    asc,
-    filter,
-    first,
-    after,
-    last,
-    before,
-  })
+  const vars = useMemo(
+    () => ({ address, orderBy, asc, filter, first, after, last, before }),
+    [address, after, asc, before, filter, first, last, orderBy]
+  )
+  const [queryOptions, setQueryOptions] = useState({ fetchKey: 0 })
+  const queryData = useLazyLoadQuery<AssetQuery>(assetQuery, vars, queryOptions)
   const { data, hasNext, loadNext, isLoadingNext } = usePaginationFragment<AssetPaginationQuery, any>(
     assetPaginationQuery,
     queryData
   )
-  const assets: GenieAsset[] = data.nftAssets?.edges?.map((queryAsset: { node: any }) => {
-    const asset = queryAsset.node
-    const ethPrice = parseEther(
-      asset.listings?.edges[0]?.node.price.value?.toLocaleString('fullwide', { useGrouping: false }) ?? '0'
-    ).toString()
-    return {
-      id: asset.id,
-      address: asset.collection.nftContracts[0]?.address,
-      notForSale: asset.listings?.edges.length === 0,
-      collectionName: asset.collection?.name,
-      collectionSymbol: asset.collection?.image?.url,
-      imageUrl: asset.image?.url,
-      animationUrl: asset.animationUrl,
-      marketplace: asset.listings?.edges[0]?.node.marketplace.toLowerCase(),
-      name: asset.name,
-      priceInfo: asset.listings
-        ? {
-            ETHPrice: ethPrice,
-            baseAsset: 'ETH',
-            baseDecimals: '18',
-            basePrice: ethPrice,
-          }
-        : undefined,
-      susFlag: asset.suspiciousFlag,
-      sellorders: asset.listings?.edges.map((listingNode: { node: SellOrder }) => {
-        return {
-          ...listingNode.node,
-          protocolParameters: listingNode.node.protocolParameters
-            ? JSON.parse(listingNode.node.protocolParameters.toString())
-            : undefined,
-        }
-      }),
-      smallImageUrl: asset.smallImage?.url,
-      tokenId: asset.tokenId,
-      tokenType: asset.collection.nftContracts[0]?.standard,
-      // totalCount?: number, // TODO waiting for BE changes
-      collectionIsVerified: asset.collection?.isVerified,
-      rarity: {
-        primaryProvider: 'Rarity Sniper', // TODO update when backend adds more providers
-        providers: asset.rarities.map((rarity: Rarity) => {
-          return {
-            ...rarity,
-            provider: 'Rarity Sniper',
-          }
-        }),
-      },
-      owner: asset.ownerAddress,
-      creator: {
-        profile_img_url: asset.collection?.creator?.profileImage?.url,
-        address: asset.collection?.creator?.address,
-      },
-      metadataUrl: asset.metadataUrl,
-    }
-  })
+
+  // Poll for updates.
+  const POLLING_INTERVAL = ms`5s`
+  const environment = useRelayEnvironment()
+  const refresh = useCallback(async () => {
+    const length = data.nftAssets?.edges?.length
+    // Initiate a network request. When it resolves, refresh the UI from store (to avoid re-triggering Suspense);
+    // see: https://relay.dev/docs/guided-tour/refetching/refreshing-queries/#if-you-need-to-avoid-suspense-1.
+    await fetchQuery<AssetQuery>(environment, assetQuery, { ...vars, first: length }).toPromise()
+    setQueryOptions(({ fetchKey }) => ({
+      fetchKey: fetchKey + 1,
+      fetchPolicy: 'store-only',
+    }))
+  }, [data.nftAssets?.edges?.length, environment, vars])
+  // NB: This will poll every POLLING_INTERVAL, *not* every POLLING_INTERVAL from the last successful poll.
+  // TODO(WEB-2004): Update useInterval to wait for the fn to complete before rescheduling.
+  useInterval(refresh, POLLING_INTERVAL)
+
+  // It is especially important for this to be memoized to avoid re-rendering from polling if data is unchanged.
+  const assets: GenieAsset[] = useMemo(
+    () =>
+      data.nftAssets?.edges?.map((queryAsset: { node: any }) => {
+        const asset = queryAsset.node
+        const ethPrice = parseEther(
+          asset.listings?.edges[0]?.node.price.value?.toLocaleString('fullwide', { useGrouping: false }) ?? '0'
+        ).toString()
+        return {
+          id: asset.id,
+          address: asset.collection.nftContracts[0]?.address,
+          notForSale: asset.listings?.edges.length === 0,
+          collectionName: asset.collection?.name,
+          collectionSymbol: asset.collection?.image?.url,
+          imageUrl: asset.image?.url,
+          animationUrl: asset.animationUrl,
+          marketplace: asset.listings?.edges[0]?.node.marketplace.toLowerCase(),
+          name: asset.name,
+          priceInfo: asset.listings
+            ? {
+                ETHPrice: ethPrice,
+                baseAsset: 'ETH',
+                baseDecimals: '18',
+                basePrice: ethPrice,
+              }
+            : undefined,
+          susFlag: asset.suspiciousFlag,
+          sellorders: asset.listings?.edges.map((listingNode: { node: SellOrder }) => {
+            return {
+              ...listingNode.node,
+              protocolParameters: listingNode.node.protocolParameters
+                ? JSON.parse(listingNode.node.protocolParameters.toString())
+                : undefined,
+            }
+          }),
+          smallImageUrl: asset.smallImage?.url,
+          tokenId: asset.tokenId,
+          tokenType: asset.collection.nftContracts[0]?.standard,
+          // totalCount?: number, // TODO waiting for BE changes
+          collectionIsVerified: asset.collection?.isVerified,
+          rarity: {
+            primaryProvider: 'Rarity Sniper', // TODO update when backend adds more providers
+            providers: asset.rarities.map((rarity: Rarity) => {
+              return {
+                ...rarity,
+                provider: 'Rarity Sniper',
+              }
+            }),
+          },
+          owner: asset.ownerAddress,
+          creator: {
+            profile_img_url: asset.collection?.creator?.profileImage?.url,
+            address: asset.collection?.creator?.address,
+          },
+          metadataUrl: asset.metadataUrl,
+        }
+      }),
+    [data.nftAssets?.edges]
+  )
   return { assets, hasNext, isLoadingNext, loadNext }
 }
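The scheduling caveat in the comments above depends on lib/hooks/useInterval, which is not part of this diff. A minimal sketch of the conventional setInterval-based hook it is assumed to resemble; because the interval never awaits the async refresh, a slow poll can overlap the next tick, which is exactly what the TODO(WEB-2004) note wants to change:

import { useEffect, useRef } from 'react'

// Assumed shape of lib/hooks/useInterval: invoke the latest callback every `delay` ms,
// or pause when delay is null. setInterval does not wait for an async callback to
// settle, so refresh() calls can overlap on slow networks.
export default function useInterval(callback: () => void, delay: null | number) {
  const savedCallback = useRef(callback)

  // Track the latest callback so each tick sees fresh closure state (vars, edge count).
  useEffect(() => {
    savedCallback.current = callback
  }, [callback])

  useEffect(() => {
    if (delay === null) return
    const id = setInterval(() => savedCallback.current(), delay)
    return () => clearInterval(id)
  }, [delay])
}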