import { Region, RegionsResponse } from "api/typesGenerated"
-import { useEffect, useReducer } from "react"
+import { useEffect, useReducer, useState } from "react"
import PerformanceObserver from "@fastly/performance-observer-polyfill"
import axios from "axios"
import { generateRandomString } from "utils/random"
@@ -25,20 +25,31 @@ const proxyLatenciesReducer = (
  action: ProxyLatencyAction,
): Record<string, ProxyLatencyReport> => {
  // Just overwrite any existing latency.
-  return {
-    ...state,
-    [action.proxyID]: action.report,
-  }
+  state[action.proxyID] = action.report
+  return state
}

export const useProxyLatency = (
  proxies?: RegionsResponse,
-): Record<string, ProxyLatencyReport> => {
+): {
+  // Refetch can be called to refetch the proxy latencies.
+  // Until the new values are loaded, the old values will still be used.
+  refetch: () => void
+  proxyLatencies: Record<string, ProxyLatencyReport>
+} => {
  const [proxyLatencies, dispatchProxyLatencies] = useReducer(
    proxyLatenciesReducer,
    {},
  )

+  // This latestFetchRequest is used to trigger a refetch of the proxy latencies.
+  const [latestFetchRequest, setLatestFetchRequest] = useState(
+    new Date().toISOString(),
+  )
+  const refetch = () => {
+    setLatestFetchRequest(new Date().toISOString())
+  }
+
  // Only run latency updates when the proxies change.
  useEffect(() => {
    if (!proxies) {
@@ -148,7 +159,10 @@ export const useProxyLatency = (
      // via the performance observer. So we can disconnect the observer.
      observer.disconnect()
    })
-  }, [proxies])
+  }, [proxies, latestFetchRequest])

-  return proxyLatencies
+  return {
+    proxyLatencies,
+    refetch,
+  }
}
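For context, here is a minimal sketch of how the new return shape could be consumed. The ProxyPicker component, its markup, and the relative import path are illustrative assumptions and are not part of this change. Calling refetch stores a fresh ISO timestamp; because that timestamp is now in the effect's dependency array, the latency checks re-run, while the reducer only overwrites entries per proxy, so previous reports stay visible until new ones arrive.

// Hypothetical consumer of useProxyLatency; component name and import path are assumptions.
import { FC } from "react"
import { RegionsResponse } from "api/typesGenerated"
import { useProxyLatency } from "./useProxyLatency"

export const ProxyPicker: FC<{ proxies?: RegionsResponse }> = ({ proxies }) => {
  // proxyLatencies keeps the last known reports; refetch triggers a new measurement pass.
  const { proxyLatencies, refetch } = useProxyLatency(proxies)

  return (
    <div>
      {/* Stale entries remain until the refetched reports overwrite them. */}
      <div>Proxies measured: {Object.keys(proxyLatencies).join(", ")}</div>
      <button onClick={refetch}>Refresh latencies</button>
    </div>
  )
}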