feat(compression): improve compression (buffered output, threshold handling), add Deno brotli support, compression tests, and a dynamic route API
@@ -68,7 +68,8 @@ export function normalizeCompressionConfig(
 // =============================================================================
 
 /**
- * Check if response should be compressed
+ * Check if response should be compressed (preliminary check)
+ * Note: Final threshold check happens in compressResponse after buffering
  */
 export function shouldCompressResponse(
   response: Response,
@@ -97,15 +98,6 @@ export function shouldCompressResponse(
     return false;
   }
 
-  // Check size threshold
-  const contentLength = response.headers.get('Content-Length');
-  if (contentLength) {
-    const size = parseInt(contentLength, 10);
-    if (size < (config.threshold ?? DEFAULT_COMPRESSION_CONFIG.threshold)) {
-      return false;
-    }
-  }
-
   // Check excluded paths
  if (config.exclude?.length) {
     const url = new URL(request.url);
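Note: the header-based size check is removed here because the byte-accurate check now runs in `compressResponse` after buffering (see the next hunk). The sketch below is illustrative only, not part of the commit; it shows why `Content-Length` is unreliable for streamed bodies.

```ts
// Illustrative: a streamed Response usually carries no Content-Length header,
// so a header-based threshold check cannot see the real body size up front.
const streamed = new Response(
  new ReadableStream<Uint8Array>({
    start(controller) {
      controller.enqueue(new TextEncoder().encode('{"items": []}'));
      controller.close();
    },
  }),
  { headers: { 'Content-Type': 'application/json' } }
);

console.log(streamed.headers.get('Content-Length')); // null -> size unknown until buffered
```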
@@ -166,28 +158,44 @@ export function selectCompressionAlgorithm(
 
 /**
  * Compress a Response object
+ * Uses buffered compression for reliability (streaming can have flushing issues)
  */
 export async function compressResponse(
   response: Response,
   algorithm: TCompressionAlgorithm,
-  level?: number
+  level?: number,
+  threshold?: number
 ): Promise<Response> {
   if (algorithm === 'identity' || !response.body) {
     return response;
   }
 
+  // Read the entire body first (required for proper compression)
+  const originalBody = new Uint8Array(await response.arrayBuffer());
+
+  // Check threshold - if body is too small, return uncompressed
+  const effectiveThreshold = threshold ?? DEFAULT_COMPRESSION_CONFIG.threshold;
+  if (originalBody.byteLength < effectiveThreshold) {
+    // Return original response with the body we read
+    return new Response(originalBody as unknown as BodyInit, {
+      status: response.status,
+      statusText: response.statusText,
+      headers: response.headers,
+    });
+  }
+
   const provider = getCompressionProvider();
 
+  // Compress the body
+  const compressedBody = await provider.compress(originalBody, algorithm, level);
+
   // Clone headers and modify
   const headers = new Headers(response.headers);
   headers.set('Content-Encoding', algorithm);
   headers.set('Vary', appendVaryHeader(headers.get('Vary'), 'Accept-Encoding'));
-  headers.delete('Content-Length'); // Size changes after compression
+  headers.set('Content-Length', compressedBody.byteLength.toString());
 
-  // Compress the body stream
-  const compressedBody = provider.compressStream(response.body, algorithm, level);
-
-  return new Response(compressedBody, {
+  return new Response(compressedBody as unknown as BodyInit, {
     status: response.status,
     statusText: response.statusText,
     headers,
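An illustrative call against the new signature shown above, `compressResponse(response, algorithm, level?, threshold?)`. The level and threshold values are example numbers, not the library defaults.

```ts
// Illustrative usage (inside an async context); 6 and 1024 are example values.
const original = new Response(JSON.stringify({ data: 'x'.repeat(4096) }), {
  headers: { 'Content-Type': 'application/json' },
});

const compressed = await compressResponse(original, 'gzip', 6, 1024);

// Bodies shorter than the threshold come back unmodified; larger ones get
// Content-Encoding and a recomputed Content-Length.
console.log(compressed.headers.get('Content-Encoding')); // 'gzip'
console.log(compressed.headers.get('Content-Length'));   // compressed byte length
```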
@@ -164,29 +164,38 @@ class NodeCompressionProvider implements ICompressionProvider {
 // =============================================================================
 
 class WebStandardCompressionProvider implements ICompressionProvider {
-  private brotliSupported: boolean | null = null;
+  private _brotliSupported: boolean | null = null;
+  private _isDeno: boolean;
 
-  private checkBrotliSupport(): boolean {
-    if (this.brotliSupported === null) {
-      try {
-        // Try to create a brotli stream - not all runtimes support it
-        new CompressionStream('deflate');
-        // Note: CompressionStream doesn't support 'br' in most runtimes yet
-        this.brotliSupported = false;
-      } catch {
-        this.brotliSupported = false;
+  constructor() {
+    this._isDeno = typeof (globalThis as any).Deno !== 'undefined';
+  }
+
+  /**
+   * Check if brotli is supported via Deno.compress API
+   */
+  private hasDenoBrotli(): boolean {
+    if (this._brotliSupported === null) {
+      if (this._isDeno) {
+        // Deno 1.37+ has Deno.compress/decompress with brotli support
+        const Deno = (globalThis as any).Deno;
+        this._brotliSupported = typeof Deno?.compress === 'function';
+      } else {
+        this._brotliSupported = false;
       }
     }
-    return this.brotliSupported;
+    return this._brotliSupported;
  }
 
   getSupportedAlgorithms(): TCompressionAlgorithm[] {
-    // CompressionStream supports gzip and deflate in most runtimes
-    // Brotli support is limited
+    // CompressionStream supports gzip and deflate
     const algorithms: TCompressionAlgorithm[] = ['gzip', 'deflate'];
-    if (this.checkBrotliSupport()) {
+
+    // Deno has native brotli via Deno.compress
+    if (this.hasDenoBrotli()) {
       algorithms.unshift('br');
     }
 
     return algorithms;
   }
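Because `'br'` is unshifted to the front of the list, a first-match negotiation against `Accept-Encoding` prefers brotli whenever the Deno API is detected. The helper below is a hypothetical sketch of that negotiation, not code from this commit:

```ts
// Hypothetical negotiation helper: the first supported algorithm the client accepts wins.
function pickAlgorithm(
  acceptEncoding: string,
  supported: TCompressionAlgorithm[]
): TCompressionAlgorithm {
  const accepted = acceptEncoding
    .split(',')
    .map((part) => part.split(';')[0].trim().toLowerCase());
  return supported.find((algo) => accepted.includes(algo)) ?? 'identity';
}

// pickAlgorithm('gzip, deflate, br', ['br', 'gzip', 'deflate']) -> 'br'
// pickAlgorithm('gzip', ['br', 'gzip', 'deflate'])              -> 'gzip'
```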
@@ -199,46 +208,54 @@ class WebStandardCompressionProvider implements ICompressionProvider {
       return data;
     }
 
-    // Map algorithm to CompressionStream format
-    // Brotli falls back to gzip if not supported
-    let format: CompressionFormat;
-    if (algorithm === 'br') {
-      format = this.checkBrotliSupport() ? ('br' as CompressionFormat) : 'gzip';
-    } else {
-      format = algorithm as CompressionFormat;
-    }
-
-    try {
-      const stream = new CompressionStream(format);
-      const writer = stream.writable.getWriter();
-      const reader = stream.readable.getReader();
-
-      // Write data and close (cast for type compatibility)
-      await writer.write(data as unknown as BufferSource);
-      await writer.close();
-
-      // Collect compressed chunks
-      const chunks: Uint8Array[] = [];
-      while (true) {
-        const { done, value } = await reader.read();
-        if (done) break;
-        chunks.push(value);
-      }
-
-      // Concatenate chunks
-      const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
-      const result = new Uint8Array(totalLength);
-      let offset = 0;
-      for (const chunk of chunks) {
-        result.set(chunk, offset);
-        offset += chunk.length;
-      }
-
-      return result;
-    } catch {
-      // Compression failed, return original
-      return data;
-    }
+    // Use Deno's native brotli if available
+    if (algorithm === 'br' && this.hasDenoBrotli()) {
+      try {
+        const Deno = (globalThis as any).Deno;
+        return await Deno.compress(data, 'br');
+      } catch {
+        // Fall through to return original
+        return data;
+      }
+    }
+
+    // Use CompressionStream for gzip/deflate
+    if (algorithm === 'gzip' || algorithm === 'deflate') {
+      try {
+        const stream = new CompressionStream(algorithm);
+        const writer = stream.writable.getWriter();
+        const reader = stream.readable.getReader();
+
+        // Write data and close
+        await writer.write(data as unknown as BufferSource);
+        await writer.close();
+
+        // Collect compressed chunks
+        const chunks: Uint8Array[] = [];
+        while (true) {
+          const { done, value } = await reader.read();
+          if (done) break;
+          chunks.push(value);
+        }
+
+        // Concatenate chunks
+        const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
+        const result = new Uint8Array(totalLength);
+        let offset = 0;
+        for (const chunk of chunks) {
+          result.set(chunk, offset);
+          offset += chunk.length;
+        }
+
+        return result;
+      } catch {
+        // Compression failed, return original
+        return data;
+      }
+    }
+
+    // Unsupported algorithm
+    return data;
   }
 
   compressStream(
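The writer/reader loop above is the portable way to drain a `CompressionStream`. As a point of comparison (a sketch assuming `Blob` and `Response` are available, not part of the commit), the same buffered result can be obtained by piping through the stream and letting `Response` collect the chunks:

```ts
// Sketch: buffered gzip via pipeThrough; Response.arrayBuffer() gathers the chunks.
async function gzipBuffered(data: Uint8Array): Promise<Uint8Array> {
  const compressed = new Blob([data])
    .stream()
    .pipeThrough(new CompressionStream('gzip'));
  return new Uint8Array(await new Response(compressed).arrayBuffer());
}
```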
@@ -250,17 +267,14 @@ class WebStandardCompressionProvider implements ICompressionProvider {
       return stream;
     }
 
-    // Map algorithm to CompressionStream format
-    let format: CompressionFormat;
-    if (algorithm === 'br') {
-      format = this.checkBrotliSupport() ? ('br' as CompressionFormat) : 'gzip';
-    } else {
-      format = algorithm as CompressionFormat;
+    // Brotli streaming not supported in Web Standard (Deno.compress is not streaming)
+    // Only gzip/deflate work with CompressionStream
+    if (algorithm !== 'gzip' && algorithm !== 'deflate') {
+      return stream;
     }
 
     try {
-      const compressionStream = new CompressionStream(format);
+      // Use type assertion for cross-runtime compatibility
+      const compressionStream = new CompressionStream(algorithm);
       return stream.pipeThrough(compressionStream as unknown as TransformStream<Uint8Array, Uint8Array>);
     } catch {
       // Compression not supported, return original stream
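For the streaming path, only gzip/deflate are wired up, since `CompressionStream` has no brotli format and the `Deno.compress` call used above is not a streaming API. A minimal illustrative wiring of the streaming path into a handler (everything outside the diff is hypothetical):

```ts
// Hypothetical middleware-style wiring of the streaming path (gzip only, for brevity).
function compressStreamingResponse(response: Response): Response {
  if (!response.body) return response;

  const headers = new Headers(response.headers);
  headers.set('Content-Encoding', 'gzip');
  headers.append('Vary', 'Accept-Encoding');
  headers.delete('Content-Length'); // length is unknown while streaming

  const body = response.body.pipeThrough(new CompressionStream('gzip'));
  return new Response(body, {
    status: response.status,
    statusText: response.statusText,
    headers,
  });
}
```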