fix(fetch): support Content-Encoding response header (#604)
Co-authored-by: Artem Zakharchenko <[email protected]>
mikicho and kettanaito authored Oct 25, 2024
1 parent 6198df0 commit b8f8d51
Showing 12 changed files with 579 additions and 12 deletions.
2 changes: 1 addition & 1 deletion package.json
@@ -200,4 +200,4 @@
"path": "./node_modules/cz-conventional-changelog"
}
}
}
}
28 changes: 18 additions & 10 deletions src/interceptors/fetch/index.ts
@@ -10,6 +10,7 @@ import { createRequestId } from '../../createRequestId'
import { RESPONSE_STATUS_CODES_WITH_REDIRECT } from '../../utils/responseUtils'
import { createNetworkError } from './utils/createNetworkError'
import { followFetchRedirect } from './utils/followRedirect'
import { decompressResponse } from './utils/decompression'

export class FetchInterceptor extends Interceptor<HttpRequestEventMap> {
static symbol = Symbol('fetch')
@@ -66,11 +67,18 @@ export class FetchInterceptor extends Interceptor<HttpRequestEventMap> {
requestId,
emitter: this.emitter,
controller,
onResponse: async (response) => {
onResponse: async (rawResponse) => {
this.logger.info('received mocked response!', {
response,
rawResponse,
})

// Decompress the mocked response body, if applicable.
const decompressedStream = decompressResponse(rawResponse)
const response =
decompressedStream === null
? rawResponse
: new Response(decompressedStream, rawResponse)

/**
* Undici's handling of following redirect responses.
* Treat the "manual" redirect mode as a regular mocked response.
@@ -98,6 +106,14 @@ }
}
}

// Set the "response.url" property to equal the intercepted request URL.
Object.defineProperty(response, 'url', {
writable: false,
enumerable: true,
configurable: false,
value: request.url,
})

if (this.emitter.listenerCount('response') > 0) {
this.logger.info('emitting the "response" event...')

@@ -115,14 +131,6 @@
})
}

// Set the "response.url" property to equal the intercepted request URL.
Object.defineProperty(response, 'url', {
writable: false,
enumerable: true,
configurable: false,
value: request.url,
})

responsePromise.resolve(response)
},
onRequestError: (response) => {
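For context, a minimal sketch of the behavior this change enables. The interceptor setup below is illustrative (it assumes the public `FetchInterceptor` export and the `controller.respondWith()` request listener API); the point is that a mocked response declaring `Content-Encoding: gzip` now reaches the fetch consumer already decompressed.

import zlib from 'node:zlib'
import { FetchInterceptor } from '@mswjs/interceptors/fetch'

const interceptor = new FetchInterceptor()
interceptor.apply()

interceptor.on('request', ({ controller }) => {
  // Respond with a gzip-compressed body and the matching header.
  controller.respondWith(
    new Response(zlib.gzipSync('hello world'), {
      headers: {
        'Content-Type': 'text/plain',
        'Content-Encoding': 'gzip',
      },
    })
  )
})

const response = await fetch('http://localhost/resource')
// Previously the consumer saw the raw gzip bytes; with this fix the body
// is decoded according to the declared Content-Encoding.
console.log(await response.text()) // "hello world"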
14 changes: 14 additions & 0 deletions src/interceptors/fetch/utils/brotli-decompress.browser.ts
@@ -0,0 +1,14 @@
export class BrotliDecompressionStream extends TransformStream {
constructor() {
console.warn(
'[Interceptors]: Brotli decompression of response streams is not supported in the browser'
)

super({
transform(chunk, controller) {
// Keep the stream as passthrough, it does nothing.
controller.enqueue(chunk)
},
})
}
}
31 changes: 31 additions & 0 deletions src/interceptors/fetch/utils/brotli-decompress.ts
@@ -0,0 +1,31 @@
import zlib from 'node:zlib'

export class BrotliDecompressionStream extends TransformStream {
constructor() {
const decompress = zlib.createBrotliDecompress({
flush: zlib.constants.BROTLI_OPERATION_FLUSH,
finishFlush: zlib.constants.BROTLI_OPERATION_FLUSH,
})

super({
async transform(chunk, controller) {
const buffer = Buffer.from(chunk)

const decompressed = await new Promise<Buffer>((resolve, reject) => {
decompress.write(buffer, (error) => {
if (error) reject(error)
})

decompress.flush()
decompress.once('data', (data) => resolve(data))
decompress.once('error', (error) => reject(error))
decompress.once('end', () => controller.terminate())
}).catch((error) => {
controller.error(error)
})

controller.enqueue(decompressed)
},
})
}
}
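A minimal usage sketch for this stream, assuming Node.js 18+ where `Response` and the web streams API are global:

import zlib from 'node:zlib'

const compressed = zlib.brotliCompressSync('hello world')

// Pipe a Brotli-compressed body through the transform and read it back.
const readable = new Response(compressed).body!.pipeThrough(
  new BrotliDecompressionStream()
)
console.log(await new Response(readable).text()) // "hello world"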
85 changes: 85 additions & 0 deletions src/interceptors/fetch/utils/decompression.ts
@@ -0,0 +1,85 @@
// Import from an internal alias that resolves to different modules
// depending on the environment. This way, we can keep the fetch interceptor
// intact while using different strategies for Brotli decompression.
import { BrotliDecompressionStream } from 'internal:brotli-decompress'

class PipelineStream extends TransformStream {
constructor(
transformStreams: Array<TransformStream>,
...strategies: Array<QueuingStrategy>
) {
super({}, ...strategies)

const readable = [super.readable as any, ...transformStreams].reduce(
(readable, transform) => readable.pipeThrough(transform)
)

Object.defineProperty(this, 'readable', {
get() {
return readable
},
})
}
}

export function parseContentEncoding(contentEncoding: string): Array<string> {
return contentEncoding
.toLowerCase()
.split(',')
.map((coding) => coding.trim())
}

function createDecompressionStream(
contentEncoding: string
): TransformStream | null {
if (contentEncoding === '') {
return null
}

const codings = parseContentEncoding(contentEncoding)

if (codings.length === 0) {
return null
}

const transformers = codings.reduceRight<Array<TransformStream>>(
(transformers, coding) => {
if (coding === 'gzip' || coding === 'x-gzip') {
return transformers.concat(new DecompressionStream('gzip'))
} else if (coding === 'deflate') {
return transformers.concat(new DecompressionStream('deflate'))
} else if (coding === 'br') {
return transformers.concat(new BrotliDecompressionStream())
} else {
transformers.length = 0
}

return transformers
},
[]
)

return new PipelineStream(transformers)
}

export function decompressResponse(
response: Response
): ReadableStream<any> | null {
if (response.body === null) {
return null
}

const decompressionStream = createDecompressionStream(
response.headers.get('content-encoding') || ''
)

if (!decompressionStream) {
return null
}

// Use `pipeTo` and return the decompression stream's readable
// instead of `pipeThrough` because that will lock the original
// response stream, making it unusable as the input to Response.
response.body.pipeTo(decompressionStream.writable)
return decompressionStream.readable
}
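An illustrative sketch of the helper with stacked codings (Node.js only, since the browser Brotli stream above is a passthrough). `Content-Encoding: br, gzip` means the body was Brotli-compressed first and then gzipped, so the pipeline built by `createDecompressionStream` un-gzips first and un-brotlis second:

import zlib from 'node:zlib'

const body = zlib.gzipSync(zlib.brotliCompressSync('hello world'))
const original = new Response(body, {
  headers: { 'Content-Encoding': 'br, gzip' },
})

const decompressed = decompressResponse(original)
if (decompressed !== null) {
  // The returned readable yields the fully decoded payload.
  console.log(await new Response(decompressed).text()) // "hello world"
}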
24 changes: 24 additions & 0 deletions test/helpers.ts
@@ -1,5 +1,6 @@
import { urlToHttpOptions } from 'node:url'
import https from 'node:https'
import zlib from 'node:zlib'
import http, { ClientRequest, IncomingMessage, RequestOptions } from 'node:http'
import nodeFetch, { Response, RequestInfo, RequestInit } from 'node-fetch'
import { Page } from '@playwright/test'
@@ -317,3 +318,26 @@ export const useCors: RequestHandler = (req, res, next) => {
})
return next()
}

/**
* Compress the given data using the specified `Content-Encoding` codings
* left-to-right.
*/
export function compressResponse(
codings: Array<'gzip' | 'x-gzip' | 'deflate' | 'br'>,
input: string
) {
let output = Buffer.from(input)

for (const coding of codings) {
if (coding === 'gzip' || coding === 'x-gzip') {
output = zlib.gzipSync(output)
} else if (coding === 'deflate') {
output = zlib.deflateSync(output)
} else if (coding === 'br') {
output = zlib.brotliCompressSync(output)
}
}

return output
}
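A sketch of how this helper pairs with the interceptor in a test (the `interceptor` instance and the `expect` assertion are assumed from the surrounding test setup, not part of the diff): compress the body with the same codings that the mocked response declares, then assert that fetch yields the original text.

const body = compressResponse(['gzip', 'deflate'], 'hello world')

interceptor.on('request', ({ controller }) => {
  controller.respondWith(
    new Response(body, {
      headers: { 'Content-Encoding': 'gzip, deflate' },
    })
  )
})

const response = await fetch('http://localhost/resource')
expect(await response.text()).toBe('hello world')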