diff --git a/.changeset/shaggy-dryers-press.md b/.changeset/shaggy-dryers-press.md
new file mode 100644
index 000000000..efc9755a5
--- /dev/null
+++ b/.changeset/shaggy-dryers-press.md
@@ -0,0 +1,5 @@
+---
+'@segment/analytics-next': minor
+---
+
+Flush large keepalive requests
diff --git a/packages/browser/src/plugins/segmentio/__tests__/batched-dispatcher.test.ts b/packages/browser/src/plugins/segmentio/__tests__/batched-dispatcher.test.ts
index d45ddd32a..869c9fc86 100644
--- a/packages/browser/src/plugins/segmentio/__tests__/batched-dispatcher.test.ts
+++ b/packages/browser/src/plugins/segmentio/__tests__/batched-dispatcher.test.ts
@@ -130,6 +130,31 @@ describe('Batching', () => {
     expect(fetch).toHaveBeenCalledTimes(1)
   })
 
+  it('sends requests if the size of events exceeds keepalive limits', async () => {
+    const { dispatch } = batch(`https://api.segment.io`, {
+      size: 600,
+      keepalive: true,
+    })
+
+    // fatEvent is about ~1kb in size
+    for (let i = 0; i < 250; i++) {
+      await dispatch(`https://api.segment.io/v1/t`, {
+        event: 'small event',
+      })
+    }
+    expect(fetch).not.toHaveBeenCalled()
+
+    for (let i = 0; i < 65; i++) {
+      await dispatch(`https://api.segment.io/v1/t`, {
+        event: 'fat event',
+        properties: fatEvent,
+      })
+    }
+
+    // still called, even though our batch limit is 600 events
+    expect(fetch).toHaveBeenCalledTimes(1)
+  })
+
   it('sends requests when the timeout expires', async () => {
     const { dispatch } = batch(`https://api.segment.io`, {
       size: 100,
diff --git a/packages/browser/src/plugins/segmentio/batched-dispatcher.ts b/packages/browser/src/plugins/segmentio/batched-dispatcher.ts
index 1fb172d6a..bfc123c80 100644
--- a/packages/browser/src/plugins/segmentio/batched-dispatcher.ts
+++ b/packages/browser/src/plugins/segmentio/batched-dispatcher.ts
@@ -5,9 +5,11 @@ import { onPageChange } from '../../lib/on-page-change'
 export type BatchingDispatchConfig = {
   size?: number
   timeout?: number
+  keepalive?: boolean
 }
 
 const MAX_PAYLOAD_SIZE = 500
+const MAX_KEEPALIVE_SIZE = 64
 
 function kilobytes(buffer: unknown): number {
   const size = encodeURI(JSON.stringify(buffer)).split(/%..|./).length - 1
@@ -23,6 +25,15 @@ function approachingTrackingAPILimit(buffer: unknown): boolean {
   return kilobytes(buffer) >= MAX_PAYLOAD_SIZE - 50
 }
 
+/**
+ * Checks if payload is over or approaching the limit for keepalive
+ * requests. If keepalive is enabled we want to avoid
+ * going over this to prevent data loss.
+ */
+function passedKeepaliveLimit(buffer: unknown): boolean {
+  return kilobytes(buffer) >= MAX_KEEPALIVE_SIZE - 10
+}
+
 function chunks(batch: object[]): Array<object[]> {
   const result: object[][] = []
   let index = 0
@@ -67,7 +78,7 @@ export default function batch(
     })
 
     return fetch(`https://${apiHost}/b`, {
-      keepalive: pageUnloaded,
+      keepalive: config?.keepalive || pageUnloaded,
       headers: {
         'Content-Type': 'text/plain',
       },
@@ -114,7 +125,9 @@ export default function batch(
     buffer.push(body)
 
     const bufferOverflow =
-      buffer.length >= limit || approachingTrackingAPILimit(buffer)
+      buffer.length >= limit ||
+      approachingTrackingAPILimit(buffer) ||
+      (config?.keepalive && passedKeepaliveLimit(buffer))
 
     return bufferOverflow || pageUnloaded ? flush() : scheduleFlush()
   }