 import { useQuery } from "@tanstack/react-query"
 import { getWorkspaceProxies } from "api/api"
 import { Region } from "api/typesGenerated"
-import axios from "axios"
 import { useDashboard } from "components/Dashboard/DashboardProvider"
-import { PerformanceObserver } from "perf_hooks"
+import PerformanceObserver from "@fastly/performance-observer-polyfill"
 import {
   createContext,
   FC,
   PropsWithChildren,
   useContext,
   useEffect,
+  useReducer,
   useState,
 } from "react"
+import axios from "axios"

 interface ProxyContextValue {
   proxy: PreferredProxy
@@ -43,6 +44,20 @@ export const ProxyContext = createContext<ProxyContextValue | undefined>(
   undefined,
 )

+interface ProxyLatencyAction {
+  proxyID: string
+  latencyMS: number
+}
+
+const proxyLatenciesReducer = (
+  state: Record<string, number>,
+  action: ProxyLatencyAction,
+): Record<string, number> => {
+  // Just overwrite any existing latency. Return a new object so React
+  // registers the update and re-renders consumers of the context.
+  return { ...state, [action.proxyID]: action.latencyMS }
+}
+
 /**
  * ProxyProvider interacts with local storage to indicate the preferred workspace proxy.
  */
@@ -57,9 +72,10 @@ export const ProxyProvider: FC<PropsWithChildren> = ({ children }) => {
   }

   const [proxy, setProxy] = useState<PreferredProxy>(savedProxy)
-  const [proxyLatenciesMS, setProxyLatenciesMS] = useState<
-    Record<string, number>
-  >({})
+  const [proxyLatenciesMS, dispatchProxyLatenciesMS] = useReducer(
+    proxyLatenciesReducer,
+    {},
+  )

   const dashboard = useDashboard()
   const experimentEnabled = dashboard?.experiments.includes("moons")
@@ -87,14 +103,91 @@ export const ProxyProvider: FC<PropsWithChildren> = ({ children }) => {
       return
     }

-    window.performance.getEntries().forEach((entry) => {
-      console.log(entry)
+    // proxyChecks maps each proxy's /healthz URL (built from path_app_url) to
+    // the proxy object. This is for the observer to know which requests are
+    // important to record.
+    const proxyChecks = proxiesResp.regions.reduce((acc, proxy) => {
+      if (!proxy.healthy) {
+        return acc
+      }
+
+      const url = new URL("/healthz", proxy.path_app_url)
+      acc[url.toString()] = proxy
+      return acc
+    }, {} as Record<string, Region>)
+
+    // Start a new performance observer to record all of the requests
+    // to the proxies.
+    const observer = new PerformanceObserver((list) => {
+      list.getEntries().forEach((entry) => {
+        if (entry.entryType !== "resource") {
+          // We should never get these, but just in case.
+          return
+        }
+
+        const check = proxyChecks[entry.name]
+        if (!check) {
+          // This is not a proxy request.
+          return
+        }
+        // These docs are super useful.
+        // https://developer.mozilla.org/en-US/docs/Web/API/Performance_API/Resource_timing
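+        // entry.duration measures the request from the start of the fetch to
+        // responseEnd, in milliseconds, which is the latency we want to record.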
+        // dispatchProxyLatenciesMS({
+        //   proxyID: check.id,
+        //   latencyMS: entry.duration,
+        // })
+
+        console.log("performance observer entry", entry)
+      })
+      console.log("performance observer", list)
     })
-    const observer = new PerformanceObserver((list, observer) => {
-      console.log("performance observer", list, observer)
+    // The resource requests include xmlhttp requests.
+    observer.observe({ entryTypes: ["resource"] })
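+    // Registering the observer before firing the requests below ensures their
+    // resource timing entries are delivered to the callback above.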
+    axios
+      .get("https://dev.coder.com/healthz")
+      .then((resp) => {
+        console.log(resp)
+      })
+      .catch((err) => {
+        console.log(err)
+      })
+
+    const proxyRequests = proxiesResp.regions.map((proxy) => {
+      // TODO: Move to /derp/latency-check
+      const url = new URL("/healthz", proxy.path_app_url)
+      return axios
+        .get(url.toString())
+        .then((resp) => {
+          return resp
+        })
+        .catch((err) => {
+          return err
+        })
+
+      // Add a random query param to ensure the request is not cached.
+      // url.searchParams.append("cache_bust", Math.random().toString())
     })

-    observer.observe({ entryTypes: ["http2", "http"] })
+    Promise.all(proxyRequests)
+      .then((resp) => {
+        console.log(resp)
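+        // takeRecords() drains any entries that are queued but not yet
+        // delivered to the observer callback.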
+        console.log("done", observer.takeRecords())
+        // observer.disconnect()
+      })
+      .catch((err) => {
+        console.log(err)
+        // observer.disconnect()
+      })
+      .finally(() => {
+        console.log("finally", observer.takeRecords())
+        // observer.disconnect()
+      })
   }, [proxiesResp])

   const setAndSaveProxy = (
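
For reference, a minimal sketch of how these pieces are presumably meant to fit together once the commented-out dispatch above is wired up. The measureProxyLatencies helper, its onLatency callback, and the record helper are illustrative names, not part of this commit; the Region fields, axios, and the polyfill import are the ones used in the diff.

import PerformanceObserver from "@fastly/performance-observer-polyfill"
import axios from "axios"
import { Region } from "api/typesGenerated"

// Illustrative sketch: measure latency to each healthy proxy by matching
// resource timing entries against the /healthz URLs we request.
export const measureProxyLatencies = (
  regions: Region[],
  onLatency: (proxyID: string, latencyMS: number) => void,
): Promise<void> => {
  // Map each proxy's /healthz URL back to the proxy so the observer can tell
  // which resource entries matter.
  const checks = regions.reduce((acc, proxy) => {
    if (proxy.healthy) {
      acc[new URL("/healthz", proxy.path_app_url).toString()] = proxy
    }
    return acc
  }, {} as Record<string, Region>)

  const record = (entry: PerformanceEntry) => {
    const check = checks[entry.name]
    if (check) {
      // duration runs from the start of the fetch to responseEnd, in milliseconds.
      onLatency(check.id, entry.duration)
    }
  }

  const observer = new PerformanceObserver((list) => {
    list.getEntries().forEach(record)
  })
  // Register before firing the requests so their entries are observed.
  observer.observe({ entryTypes: ["resource"] })

  const requests = Object.keys(checks).map((url) =>
    // Swallow errors so Promise.all settles even if a proxy is unreachable.
    axios.get(url).catch(() => undefined),
  )

  return Promise.all(requests).then(() => {
    // Entries may still be queued when the responses arrive; drain them
    // before disconnecting so no measurement is lost.
    observer.takeRecords().forEach(record)
    observer.disconnect()
  })
}

Passing (proxyID, latencyMS) => dispatchProxyLatenciesMS({ proxyID, latencyMS }) as onLatency would feed the results into the reducer added above.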