@@ -142,7 +142,7 @@ const slice = createSlice({
             state.result.queryId = chunk.meta.query_id;
             state.result.data.traceId = chunk.meta.trace_id;
         },
-        addStreamingChunks: (state, action: PayloadAction<StreamDataChunk[]>) => {
+        addStreamingChunk: (state, action: PayloadAction<StreamDataChunk>) => {
             if (!state.result) {
                 return;
             }
@@ -151,7 +151,6 @@ const slice = createSlice({
                 state.result.data = prepareQueryData(null);
             }
 
-            // Initialize speed metrics if not present
            if (!state.result.speedMetrics) {
                state.result.speedMetrics = {
                    rowsPerSecond: 0,
@@ -161,45 +160,27 @@ const slice = createSlice({
             }
 
             const currentTime = Date.now();
-            let totalNewRows = 0;
-
-            const mergedStreamDataChunks = new Map<number, StreamDataChunk>();
-            for (const chunk of action.payload) {
-                const currentMergedChunk = mergedStreamDataChunks.get(chunk.meta.result_index);
-                const chunkRowCount = (chunk.result.rows || []).length;
-                totalNewRows += chunkRowCount;
-
-                if (currentMergedChunk) {
-                    if (!currentMergedChunk.result.rows) {
-                        currentMergedChunk.result.rows = [];
-                    }
-                    for (const row of chunk.result.rows || []) {
-                        currentMergedChunk.result.rows.push(row);
-                    }
-                } else {
-                    mergedStreamDataChunks.set(chunk.meta.result_index, chunk);
-                }
-            }
+            const chunk = action.payload;
 
             // Update speed metrics
             const metrics = state.result.speedMetrics;
             metrics.recentChunks.push({
                 timestamp: currentTime,
-                rowCount: totalNewRows,
+                rowCount: chunk.result.rows?.length || 0,
             });
 
             // Keep only chunks from the last 5 seconds
             const WINDOW_SIZE = 5000; // 5 seconds in milliseconds
             metrics.recentChunks = metrics.recentChunks.filter(
-                (chunk) => currentTime - chunk.timestamp <= WINDOW_SIZE,
+                (_chunk) => currentTime - _chunk.timestamp <= WINDOW_SIZE,
             );
 
             // Calculate moving average
             if (metrics.recentChunks.length > 0) {
                 const oldestChunkTime = metrics.recentChunks[0].timestamp;
                 const timeWindow = (currentTime - oldestChunkTime) / 1000; // Convert to seconds
                 const totalRows = metrics.recentChunks.reduce(
-                    (sum, chunk) => sum + chunk.rowCount,
+                    (sum, _chunk) => sum + _chunk.rowCount,
                     0,
                 );
                 metrics.rowsPerSecond = timeWindow > 0 ? totalRows / timeWindow : 0;
@@ -210,40 +191,38 @@ const slice = createSlice({
             if (!state.result.data.resultSets) {
                 state.result.data.resultSets = [];
             }
+            const resultIndex = chunk.meta.result_index;
 
-            for (const [resultIndex, chunk] of mergedStreamDataChunks.entries()) {
-                const {columns, rows} = chunk.result;
+            const {columns, rows} = chunk.result;
 
-                if (!state.result.data.resultSets[resultIndex]) {
-                    state.result.data.resultSets[resultIndex] = {
-                        columns: [],
-                        result: [],
-                    };
-                }
+            if (!state.result.data.resultSets[resultIndex]) {
+                state.result.data.resultSets[resultIndex] = {
+                    columns: [],
+                    result: [],
+                };
+            }
 
-                if (columns && !state.result.data.resultSets[resultIndex].columns?.length) {
-                    state.result.data.resultSets[resultIndex].columns?.push(INDEX_COLUMN);
-                    for (const column of columns) {
-                        state.result.data.resultSets[resultIndex].columns?.push(column);
-                    }
+            if (columns && !state.result.data.resultSets[resultIndex].columns?.length) {
+                state.result.data.resultSets[resultIndex].columns?.push(INDEX_COLUMN);
+                for (const column of columns) {
+                    state.result.data.resultSets[resultIndex].columns?.push(column);
                 }
+            }
 
-                const indexedRows = rows || [];
-                const startIndex =
-                    state.result?.data?.resultSets?.[resultIndex].result?.length || 1;
+            const indexedRows = rows || [];
+            const startIndex = state.result?.data?.resultSets?.[resultIndex].result?.length || 1;
 
-                indexedRows.forEach((row, index) => {
-                    row.unshift(startIndex + index);
-                });
+            indexedRows.forEach((row, index) => {
+                row.unshift(startIndex + index);
+            });
 
-                const formattedRows = parseResult(
-                    indexedRows,
-                    state.result.data.resultSets[resultIndex].columns || [],
-                );
+            const formattedRows = parseResult(
+                indexedRows,
+                state.result.data.resultSets[resultIndex].columns || [],
+            );
 
-                for (const row of formattedRows) {
-                    state.result.data.resultSets[resultIndex].result?.push(row);
-                }
+            for (const row of formattedRows) {
+                state.result.data.resultSets[resultIndex].result?.push(row);
             }
         },
         setStreamQueryResponse: (state, action: PayloadAction<QueryResponseChunk>) => {
@@ -302,7 +281,7 @@ export const {
     goToNextQuery,
     setTenantPath,
     setQueryHistoryFilter,
-    addStreamingChunks,
+    addStreamingChunk,
     setStreamQueryResponse,
     setStreamSession,
 } = slice.actions;
@@ -348,17 +327,6 @@ export const queryApi = api.injectEndpoints({
             );
 
             try {
-                let streamDataChunkBatch: StreamDataChunk[] = [];
-                let batchTimeout: number | null = null;
-
-                const flushBatch = () => {
-                    if (streamDataChunkBatch.length > 0) {
-                        dispatch(addStreamingChunks(streamDataChunkBatch));
-                        streamDataChunkBatch = [];
-                    }
-                    batchTimeout = null;
-                };
-
                 await window.api.streaming.streamQuery(
                     {
                         query,
@@ -390,20 +358,11 @@
                            dispatch(setStreamSession(chunk));
                        },
                        onStreamDataChunk: (chunk) => {
-                            streamDataChunkBatch.push(chunk);
-                            if (!batchTimeout) {
-                                batchTimeout = window.requestAnimationFrame(flushBatch);
-                            }
+                            dispatch(addStreamingChunk(chunk));
                        },
                    },
                );
 
-                // Flush any remaining chunks
-                if (batchTimeout) {
-                    window.cancelAnimationFrame(batchTimeout);
-                    flushBatch();
-                }
-
                return {data: null};
            } catch (error) {
                const state = getState() as RootState;