fix: refactor fetching and refetch logic
This commit is contained in:
parent 2aea2aafde
commit 552b75d9d1
@@ -1,89 +0,0 @@
import type { SourceID, SourceResponse } from "@shared/types"
import { getters } from "#/getters"
import { getCacheTable } from "#/database/cache"
import type { CacheInfo } from "#/types"

export default defineEventHandler(async (event): Promise<SourceResponse> => {
  try {
    let id = getRouterParam(event, "id") as SourceID
    const query = getQuery(event)
    const latest = query.latest !== undefined && query.latest !== "false"
    const isValid = (id: SourceID) => !id || !sources[id] || !getters[id]

    if (isValid(id)) {
      const redirectID = sources?.[id]?.redirect
      if (redirectID) id = redirectID
      if (isValid(id)) throw new Error("Invalid source id")
    }

    const cacheTable = await getCacheTable()
    const now = Date.now()
    let cache: CacheInfo
    if (cacheTable) {
      cache = await cacheTable.get(id)
      if (cache) {
        // interval is the refresh interval, and it applies even when the cache has expired. Essentially the content itself updates slowly, so within this window it may not change at all.
        // Defaults to 10 minutes, which is below the TTL, but some sources update less often than the TTL, some even only once a day.
        const interval = sources[id].interval
        if (now - cache.updated < interval) {
          return {
            status: "success",
            id,
            updatedTime: now,
            items: cache.data,
          }
        }

        // TTL is the cache expiry time. Within this window the cache is reused even if the content has been updated upstream.
        // Reusing the cache does not refresh its timestamp.
        if (now - cache.updated < TTL) {
          // latest requested
          // no latest, but the server has login disabled

          // no latest
          // latest requested, the server allows login but the user is not logged in
          if (!latest || (!event.context.disabledLogin && !event.context.user)) {
            return {
              status: "cache",
              id,
              updatedTime: cache.updated,
              items: cache.data,
            }
          }
        }
      }
    }

    try {
      const newData = (await getters[id]()).slice(0, 30)
      if (cacheTable && newData) {
        if (event.context.waitUntil) event.context.waitUntil(cacheTable.set(id, newData))
        else await cacheTable.set(id, newData)
      }
      logger.success(`fetch ${id} latest`)
      return {
        status: "success",
        id,
        updatedTime: now,
        items: newData,
      }
    } catch (e) {
      if (cache!) {
        return {
          status: "cache",
          id,
          updatedTime: cache.updated,
          items: cache.data,
        }
      } else {
        throw e
      }
    }
  } catch (e: any) {
    logger.error(e)
    throw createError({
      statusCode: 500,
      message: e instanceof Error ? e.message : "Internal Server Error",
    })
  }
})
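Not part of the diff: the interval/TTL comments above describe a two-tier freshness rule; ignoring the latest/login branch, it reduces to roughly the following sketch (illustrative TypeScript using the same quantities the handler uses).

// Sketch only, not committed code. The per-source `interval` is assumed to be shorter
// than the global `TTL`, as the comments above state.
type Freshness = "fresh" | "reusable" | "stale"

function classifyCache(now: number, updated: number, interval: number, TTL: number): Freshness {
  if (now - updated < interval) return "fresh" // served with status "success"
  if (now - updated < TTL) return "reusable" // served with status "cache", timestamp unchanged
  return "stale" // fall through and call the source getter again
}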
server/api/s/entire.post.ts (new file, 22 lines)
@@ -0,0 +1,22 @@
import type { SourceID, SourceResponse } from "@shared/types"
import { getCacheTable } from "#/database/cache"

export default defineEventHandler(async (event) => {
  try {
    const { sources: _ }: { sources: SourceID[] } = await readBody(event)
    const cacheTable = await getCacheTable()
    const ids = _?.filter(k => sources[k])
    if (ids?.length && cacheTable) {
      const caches = await cacheTable.getEntire(ids)
      const now = Date.now()
      return caches.map(cache => ({
        status: "cache",
        id: cache.id,
        items: cache.items,
        updatedTime: now - cache.updated < sources[cache.id].interval ? now : cache.updated,
      })) as SourceResponse[]
    }
  } catch {
    //
  }
})
@@ -1,14 +0,0 @@
import { getCacheTable } from "#/database/cache"

export default defineEventHandler(async (event) => {
  try {
    const { sources } = await readBody(event)
    const cacheTable = await getCacheTable()
    if (sources && cacheTable) {
      const data = await cacheTable.getEntries(sources)
      return data
    }
  } catch {
    //
  }
})
@@ -17,20 +17,21 @@ export default defineEventHandler(async (event): Promise<SourceResponse> => {
    }

    const cacheTable = await getCacheTable()
    // Date.now() in Cloudflare Worker will not update throughout the entire runtime.
    const now = Date.now()
    let cache: CacheInfo
    let cache: CacheInfo | undefined
    if (cacheTable) {
      cache = await cacheTable.get(id)
      if (cache) {
      // if (cache) {
        // interval is the refresh interval, and it applies even when the cache has expired. Essentially the content itself updates slowly, so within this window it may not change at all.
        // Defaults to 10 minutes, which is below the TTL, but some sources update less often than the TTL, some even only once a day.
        const interval = sources[id].interval
        if (now - cache.updated < interval) {
        if (now - cache.updated < sources[id].interval) {
          return {
            status: "success",
            id,
            updatedTime: now,
            items: cache.data,
            items: cache.items,
          }
        }

@@ -47,7 +48,7 @@ export default defineEventHandler(async (event): Promise<SourceResponse> => {
            status: "cache",
            id,
            updatedTime: cache.updated,
            items: cache.data,
            items: cache.items,
          }
        }
      }
@@ -56,7 +57,7 @@ export default defineEventHandler(async (event): Promise<SourceResponse> => {

    try {
      const newData = (await getters[id]()).slice(0, 30)
      if (cacheTable && newData) {
      if (cacheTable && newData.length) {
        if (event.context.waitUntil) event.context.waitUntil(cacheTable.set(id, newData))
        else await cacheTable.set(id, newData)
      }
@@ -73,7 +74,7 @@ export default defineEventHandler(async (event): Promise<SourceResponse> => {
          status: "cache",
          id,
          updatedTime: cache.updated,
          items: cache.data,
          items: cache.items,
        }
      } else {
        throw e
@@ -1,7 +1,7 @@
import process from "node:process"
import type { NewsItem } from "@shared/types"
import type { Database } from "db0"
import type { CacheInfo } from "../types"
import type { CacheInfo, CacheRow } from "../types"

export class Cache {
  private db
@@ -28,27 +28,22 @@ export class Cache {
    logger.success(`set ${key} cache`)
  }

  async get(key: string): Promise<CacheInfo> {
    const row: any = await this.db.prepare(`SELECT id, data, updated FROM cache WHERE id = ?`).get(key)
    const r = row
      ? {
          ...row,
          data: JSON.parse(row.data),
        }
      : undefined
    logger.success(`get ${key} cache`)
    return r
  async get(key: string): Promise<CacheInfo | undefined > {
    const row = (await this.db.prepare(`SELECT id, data, updated FROM cache WHERE id = ?`).get(key)) as CacheRow | undefined
    if (row) {
      logger.success(`get ${key} cache`)
      return {
        id: row.id,
        updated: row.updated,
        items: JSON.parse(row.data),
      }
    }
  }

  async getEntries(keys: string[]) {
  async getEntire(keys: string[]): Promise<CacheInfo[]> {
    const keysStr = keys.map(k => `id = '${k}'`).join(" or ")
    const res = await this.db.prepare(`SELECT id, data, updated FROM cache WHERE ${keysStr}`).all() as any

    const rows = (res.results ?? res) as {
      id: SourceID
      data: string
      updated: number
    }[]
    const rows = (res.results ?? res) as CacheRow[]

    /**
     * https://developers.cloudflare.com/d1/build-with-d1/d1-client-api/#return-object
@@ -60,12 +55,14 @@ export class Cache {
     * }
     */
    if (rows?.length) {
      logger.success(`get entries cache`)
      return Object.fromEntries(rows.map(row => [row.id, {
      logger.success(`get entire (...) cache`)
      return rows.map(row => ({
        id: row.id,
        updatedTime: row.updated,
        updated: row.updated,
        items: JSON.parse(row.data) as NewsItem[],
      }]))
      }))
    } else {
      return []
    }
  }

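Not part of the diff: the doc comment split across the two hunks above links to the Cloudflare D1 return shape, which is why the code reads `res.results ?? res`. A self-contained sketch of that normalization (the shapes are an assumption based on the linked D1 docs and on local SQLite drivers that return the row array directly):

// Sketch only, not committed code.
interface Row { id: string, data: string, updated: number }

function normalizeRows(res: Row[] | { results?: Row[] }): Row[] {
  // Cloudflare D1's .all() wraps rows in an envelope ({ results, success, meta });
  // a local driver returns the array itself, so accept both.
  return Array.isArray(res) ? res : res.results ?? []
}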
@@ -17,7 +17,13 @@ export interface RSSItem {

export interface CacheInfo {
  id: SourceID
  data: NewsItem[]
  items: NewsItem[]
  updated: number
}

export interface CacheRow {
  id: SourceID
  data: string
  updated: number
}

@@ -160,6 +160,8 @@ export const originSources = {
    },
    depth: {
      title: "深度头条",
      // invalid, not way to get
      disable: true,
      interval: Time.Common,
    },
  },
@@ -108,5 +108,3 @@ export interface SourceResponse {
  updatedTime: number | string
  items: NewsItem[]
}

export type EntriesSourceResponse = Partial<Record<SourceID, SourceResponse>>
@@ -1,12 +1,11 @@
import type { NewsItem, SourceID, SourceResponse } from "@shared/types"
import { useQuery } from "@tanstack/react-query"
import { AnimatePresence, motion } from "framer-motion"
import { AnimatePresence, motion, useInView } from "framer-motion"
import type { SyntheticListenerMap } from "@dnd-kit/core/dist/hooks/utilities"
import { useWindowSize } from "react-use"
import { forwardRef, useImperativeHandle } from "react"
import { OverlayScrollbar } from "../common/overlay-scrollbar"
import { safeParseString } from "~/utils"
import { cache } from "~/utils/cache"

export interface ItemsProps extends React.HTMLAttributes<HTMLDivElement> {
  id: SourceID
@@ -25,6 +24,10 @@ interface NewsCardProps {
export const CardWrapper = forwardRef<HTMLDivElement, ItemsProps>(({ id, isDragged, handleListeners, style, ...props }, dndRef) => {
  const ref = useRef<HTMLDivElement>(null)

  const inView = useInView(ref, {
    once: true,
  })

  useImperativeHandle(dndRef, () => ref.current!)

  return (
@@ -42,55 +45,63 @@ export const CardWrapper = forwardRef<HTMLDivElement, ItemsProps>(({ id, isDragg
      }}
      {...props}
    >
      <NewsCard id={id} handleListeners={handleListeners} />
      {inView && <NewsCard id={id} handleListeners={handleListeners} />}
    </div>
  )
})

function NewsCard({ id, handleListeners }: NewsCardProps) {
  const { refresh, getRefreshId } = useRefetch()
  const { data, isFetching, isPlaceholderData, isError } = useQuery({
    queryKey: [id, getRefreshId(id)],
  const { refresh } = useRefetch()
  const { data, isFetching, isError } = useQuery({
    queryKey: ["source", id],
    queryFn: async ({ queryKey }) => {
      const [_id, _refetchTime] = queryKey as [SourceID, number]
      let url = `/s?id=${_id}`
      const id = queryKey[1] as SourceID
      let url = `/s?id=${id}`
      const headers: Record<string, any> = {}
      if (Date.now() - _refetchTime < 1000) {
        url = `/s?id=${_id}&latest`
      if (refetchSources.has(id)) {
        url = `/s?id=${id}&latest`
        const jwt = safeParseString(localStorage.getItem("jwt"))
        if (jwt) headers.Authorization = `Bearer ${jwt}`
      } else if (cache.has(_id)) {
        return cache.get(_id)
        refetchSources.delete(id)
      } else if (cacheSources.has(id)) {
        // wait animation
        await delay(200)
        return cacheSources.get(id)
      }

      const response: SourceResponse = await myFetch(url, {
        headers,
      })

      try {
        if (response.items && sources[_id].type === "hottest" && cache.has(_id)) {
          response.items.forEach((item, i) => {
            const o = cache.get(_id)!.items.findIndex(k => k.id === item.id)
            item.extra = {
              ...item?.extra,
              diff: o === -1 ? undefined : o - i,
            }
          })
      function diff() {
        try {
          if (response.items && sources[id].type === "hottest" && cacheSources.has(id)) {
            response.items.forEach((item, i) => {
              const o = cacheSources.get(id)!.items.findIndex(k => k.id === item.id)
              item.extra = {
                ...item?.extra,
                diff: o === -1 ? undefined : o - i,
              }
            })
          }
        } catch (e) {
          console.error(e)
        }
      } catch (e) {
        console.log(e)
      }

      cache.set(_id, response)
      diff()

      cacheSources.set(id, response)
      return response
    },
    placeholderData: prev => prev,
    staleTime: 1000 * 60 * 1,
    staleTime: Infinity,
    refetchOnMount: false,
    refetchOnReconnect: false,
    refetchOnWindowFocus: false,
    retry: false,
  })

  const isFreshFetching = useMemo(() => isFetching && !isPlaceholderData, [isFetching, isPlaceholderData])

  const { isFocused, toggleFocus } = useFocusWith(id)

  return (
@@ -143,7 +154,7 @@ function NewsCard({ id, handleListeners }: NewsCardProps) {
      <OverlayScrollbar
        className={$([
          "h-full p-2 overflow-y-auto rounded-2xl bg-base bg-op-70!",
          isFreshFetching && `animate-pulse`,
          isFetching && `animate-pulse`,
          `sprinkle-${sources[id].color}`,
        ])}
        options={{
@@ -151,7 +162,7 @@ function NewsCard({ id, handleListeners }: NewsCardProps) {
        }}
        defer={false}
      >
        <div className={$("transition-opacity-500", isFreshFetching && "op-20")}>
        <div className={$("transition-opacity-500", isFetching && "op-20")}>
          {!!data?.items?.length && (sources[id].type === "hottest" ? <NewsListHot items={data.items} /> : <NewsListTimeLine items={data.items} />)}
        </div>
      </OverlayScrollbar>
@@ -17,34 +17,13 @@ import { SortableContext, arrayMove, defaultAnimateLayoutChanges, rectSortingStr
import type { SourceID } from "@shared/types"
import { CSS } from "@dnd-kit/utilities"
import { motion } from "framer-motion"
import { useQuery } from "@tanstack/react-query"
import type { ItemsProps } from "./card"
import { CardWrapper } from "./card"
import { currentSourcesAtom } from "~/atoms"

export function Dnd() {
  const [items, setItems] = useAtom(currentSourcesAtom)
  useQuery({
    // sort in place
    queryKey: ["entries", [...items].sort()],
    queryFn: async ({ queryKey }) => {
      const sources = queryKey[1]
      const res: EntriesSourceResponse = await myFetch("/s/entries", {
        method: "POST",
        body: {
          sources,
        },
      })
      if (res) {
        for (const [k, v] of Object.entries(res)) {
          cache.set(k as SourceID, v)
        }
        return res
      }
      return null
    },
    staleTime: 1000 * 60 * 5,
  })
  useEntireQuery(items)

  return (
    <DndWrapper items={items} setItems={setItems}>
@@ -75,8 +54,13 @@ export function Dnd() {
            type: "tween",
          }}
          variants={{
            hidden: { y: 20, opacity: 0 },
            hidden: {
              y: 20,
              opacity: 0,
              display: "none",
            },
            visible: {
              display: "block",
              y: 0,
              opacity: 1,
            },
@@ -11,6 +11,7 @@ export function Column({ id }: { id: FixedColumnID }) {
  }, [id, setCurrentColumnID])

  useTitle(`NewsNow | ${metadata[id].name}`)

  return (
    <>
      <div className="flex justify-center md:hidden mb-6">
@@ -24,7 +24,8 @@ function Refresh() {

  const isFetching = useIsFetching({
    predicate: (query) => {
      return currentSources.includes(query.queryKey[0] as SourceID)
      const [type, id] = query.queryKey as ["source" | "entire", SourceID]
      return (type === "source" && currentSources.includes(id)) || type === "entire"
    },
  })

src/hooks/query.ts (new file, 53 lines)
@@ -0,0 +1,53 @@
import { useQuery, useQueryClient } from "@tanstack/react-query"
import type { SourceID, SourceResponse } from "@shared/types"

export function useUpdateQuery() {
  const queryClient = useQueryClient()

  /**
   * update query
   */
  return useCallback(async (...sources: SourceID[]) => {
    await queryClient.refetchQueries({
      predicate: (query) => {
        const [type, id] = query.queryKey as ["source" | "entire", SourceID]
        return type === "source" && sources.includes(id)
      },
    })
  }, [queryClient])
}

export function useEntireQuery(items: SourceID[]) {
  const update = useUpdateQuery()
  useQuery({
    // sort in place
    queryKey: ["entire", [...items].sort()],
    queryFn: async ({ queryKey }) => {
      const sources = queryKey[1]
      if (sources.length === 0) return null
      const res: SourceResponse[] | undefined = await myFetch("/s/entire", {
        method: "POST",
        body: {
          sources,
        },
      })
      if (res?.length) {
        const s = [] as SourceID[]
        res.forEach((v) => {
          const id = v.id
          if (!cacheSources.has(id) || cacheSources.get(id)!.updatedTime < v.updatedTime) {
            s.push(id)
            cacheSources.set(id, v)
          }
        })
        // update now
        update(...s)

        return res
      }
      return null
    },
    staleTime: 1000 * 60 * 3,
    retry: false,
  })
}
@@ -18,7 +18,7 @@ enableLoginAtom.onMount = (set) => {
  }).catch((e) => {
    if (e.statusCode === 506) {
      set({ enable: false })
      console.log("clear")
      localStorage.removeItem("jwt")
    }
  })
}
@@ -1,15 +1,25 @@
import { useBeforeUnload, useMount } from "react-use"

const KEY = "unload-time"
export function isPageReload() {
  const _ = localStorage.getItem(KEY)
  if (!_) return false
  const unloadTime = Number(_)
  if (!Number.isNaN(unloadTime) && Date.now() - unloadTime < 1000) {
    return true
  }
  localStorage.removeItem(KEY)
  return false
}

export function useOnReload(fn?: () => Promise<void> | void, fallback?: () => Promise<void> | void) {
  useBeforeUnload(() => {
    localStorage.setItem("quitTime", Date.now().toString())
    localStorage.setItem(KEY, Date.now().toString())
    return false
  })

  useMount(() => {
    const _ = localStorage.getItem("quitTime")
    const quitTime = _ ? Number(_) : 0
    if (!Number.isNaN(quitTime) && Date.now() - quitTime < 1000) {
    if (isPageReload()) {
      fn?.()
    } else {
      fallback?.()
@@ -1,24 +1,13 @@
import type { SourceID } from "@shared/types"
import { useUpdateQuery } from "./query"

function initRefetchSources() {
  let time = 0
  // useOnReload
  // Not placed inside useOnReload, so that refetchSourceAtom is not modified again after initialization, which would trigger repeated API requests.
  const _ = localStorage.getItem("quitTime")
  const now = Date.now()
  const quitTime = _ ? Number(_) : 0
  if (!Number.isNaN(quitTime) && now - quitTime < 1000) {
    time = now
  }
  return Object.fromEntries(Object.keys(sources).map(k => [k, time])) as Record<SourceID, number>
}

const refetchSourcesAtom = atom(initRefetchSources())
export function useRefetch() {
  const [refetchSource, setRefetchSource] = useAtom(refetchSourcesAtom)
  const { enableLogin, loggedIn, login } = useLogin()
  const toaster = useToast()

  const updateQuery = useUpdateQuery()
  /**
   * force refresh
   */
  const refresh = useCallback((...sources: SourceID[]) => {
    if (enableLogin && !loggedIn) {
      toaster("登录后可以强制拉取最新数据", {
@@ -29,18 +18,14 @@ export function useRefetch() {
        },
      })
    } else {
      const obj = Object.fromEntries(sources.map(id => [id, Date.now()]))
      setRefetchSource(prev => ({
        ...prev,
        ...obj,
      }))
      refetchSources.clear()
      sources.forEach(id => refetchSources.add(id))
      updateQuery(...sources)
    }
  }, [setRefetchSource, loggedIn, toaster, login, enableLogin])

  const getRefreshId = useCallback((id: SourceID) => refetchSource[id], [refetchSource])
  }, [loggedIn, toaster, login, enableLogin, updateQuery])

  return {
    refresh,
    getRefreshId,
    refetchSources,
  }
}
@@ -1,3 +0,0 @@
import type { SourceID, SourceResponse } from "@shared/types"

export const cache: Map<SourceID, SourceResponse> = new Map()
src/utils/data.ts (new file, 4 lines)
@@ -0,0 +1,4 @@
import type { SourceID, SourceResponse } from "@shared/types"

export const cacheSources = new Map<SourceID, SourceResponse>()
export const refetchSources = new Set<SourceID>()
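Not part of the diff: a minimal sketch of the force-refresh path these new modules enable, inferred from the changed files, with `updateQuery` standing in for the useUpdateQuery() hook above.

// Sketch only, not committed code.
import type { SourceID } from "@shared/types"

// Mirrors refetchSources from src/utils/data.ts.
const refetchSources = new Set<SourceID>()

// Stand-in for useUpdateQuery(): refetches every ["source", id] query for the given ids.
declare function updateQuery(...ids: SourceID[]): Promise<void>

export async function forceRefresh(...ids: SourceID[]) {
  refetchSources.clear()
  ids.forEach(id => refetchSources.add(id)) // the card queryFn will now request `/s?id=...&latest`
  await updateQuery(...ids) // responses are written back into cacheSources for later reuse
}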