From b3e28aad38198044c27f784d0517714f03ed4f1d Mon Sep 17 00:00:00 2001 From: Etienne Maheux Date: Thu, 26 Sep 2024 21:51:54 +0200 Subject: [PATCH 1/3] Minor improvements for base64 content + HAR _content is always base64-encoded --- .../config/android-device-config.tsx | 5 +--- src/model/events/content-types.ts | 25 +++++++++++++------ src/model/http/har.ts | 16 ++++++------ src/services/ui-worker-formatters.ts | 4 ++- test/unit/model/http/content-types.spec.ts | 19 +++++++++++++- 5 files changed, 48 insertions(+), 21 deletions(-) diff --git a/src/components/intercept/config/android-device-config.tsx b/src/components/intercept/config/android-device-config.tsx index 605d9b22..a1f681c7 100644 --- a/src/components/intercept/config/android-device-config.tsx +++ b/src/components/intercept/config/android-device-config.tsx @@ -64,10 +64,7 @@ const Spacer = styled.div` `; function urlSafeBase64(content: string) { - return stringToBuffer(content) - .toString('base64') - .replace(/\+/g, '-') - .replace(/\//g, '_'); + return stringToBuffer(content).toString('base64url'); } function getConfigRequestIds(eventsStore: EventsStore) { diff --git a/src/model/events/content-types.ts b/src/model/events/content-types.ts index 20d566a2..80d8c4ef 100644 --- a/src/model/events/content-types.ts +++ b/src/model/events/content-types.ts @@ -147,15 +147,25 @@ export function getDefaultMimeType(contentType: ViewableContentType): string { return _.findKey(mimeTypeToContentTypeMap, (c) => c === contentType)!; } -function isValidBase64Byte(byte: number) { +function isValidAlphaNumOrSpace(byte: number) { return (byte >= 65 && byte <= 90) || // A-Z (byte >= 97 && byte <= 122) || // a-z (byte >= 48 && byte <= 57) || // 0-9 - byte === 43 || // + - byte === 47 || // / byte === 61; // = } +function isValidStandardBase64Byte(byte: number) { + // + / (standard) + return byte === 43 || byte === 47 + || isValidAlphaNumOrSpace(byte); +} + +function isValidURLSafeBase64Byte(byte: number) { + // - _ (URL-safe version) + return byte === 45 || byte === 95 + || isValidAlphaNumOrSpace(byte); +} + export function getCompatibleTypes( contentType: ViewableContentType, rawContentType: string | undefined, @@ -203,10 +213,11 @@ export function getCompatibleTypes( if ( body && - body.length > 0 && - body.length % 4 === 0 && // Multiple of 4 bytes - body.length < 1000 * 100 && // < 100 KB of content - body.every(isValidBase64Byte) + !types.has('base64') && + body.length >= 8 && + // body.length % 4 === 0 && // Multiple of 4 bytes (final padding may be omitted) + body.length < 100_000 && // < 100 KB of content + (body.every(isValidStandardBase64Byte) || body.every(isValidURLSafeBase64Byte)) ) { types.add('base64'); } diff --git a/src/model/http/har.ts b/src/model/http/har.ts index 7a53bae3..d159bad3 100644 --- a/src/model/http/har.ts +++ b/src/model/http/har.ts @@ -51,15 +51,15 @@ interface HarLog extends HarFormat.Log { export type RequestContentData = { text: string; size: number; - encoding?: 'base64'; + encoding: 'base64'; comment?: string; }; export interface ExtendedHarRequest extends HarFormat.Request { _requestBodyStatus?: - | 'discarded:too-large' - | 'discarded:not-representable' - | 'discarded:not-decodable'; + | 'discarded:too-large' + | 'discarded:not-representable' // to indicate that extended field `_content` is populated with base64 `postData` + | 'discarded:not-decodable'; _content?: RequestContentData; _trailers?: HarFormat.Header[]; } @@ -302,7 +302,7 @@ async function generateHarResponse( const decoded = await 
response.body.decodedPromise; - let responseContent: { text: string, encoding?: string } | { comment: string}; + let responseContent: { text: string, encoding?: string } | { comment: string }; try { if (!decoded || decoded.byteLength > options.bodySizeLimit) { // If no body or the body is too large, don't include it @@ -435,10 +435,10 @@ function generateHarWebSocketMessage( return { // Note that msg.direction is from the perspective of Mockttp, not the client. type: message.direction === 'sent' - ? 'receive' + ? 'receive' : message.direction === 'received' ? 'send' - : unreachableCheck(message.direction), + : unreachableCheck(message.direction), opcode: message.isBinary ? 2 : 1, data: message.isBinary @@ -751,7 +751,7 @@ function parseHttpVersion( } function parseHarRequestContents(data: RequestContentData): Buffer { - if (data.encoding && Buffer.isEncoding(data.encoding)) { + if (Buffer.isEncoding(data.encoding)) { return Buffer.from(data.text, data.encoding); } diff --git a/src/services/ui-worker-formatters.ts b/src/services/ui-worker-formatters.ts index 0907fec2..67fef007 100644 --- a/src/services/ui-worker-formatters.ts +++ b/src/services/ui-worker-formatters.ts @@ -39,7 +39,9 @@ const WorkerFormatters = { } }, base64: (content: Buffer) => { - return Buffer.from(content.toString('utf8'), 'base64').toString('utf8'); + const b64 = content.toString('ascii'); + const encoding = b64.match(/[-_]/) ? 'base64url' : 'base64'; + return Buffer.from(b64, encoding).toString('utf8'); }, markdown: (content: Buffer) => { return content.toString('utf8'); diff --git a/test/unit/model/http/content-types.spec.ts b/test/unit/model/http/content-types.spec.ts index defdc826..3d098665 100644 --- a/test/unit/model/http/content-types.spec.ts +++ b/test/unit/model/http/content-types.spec.ts @@ -1,6 +1,6 @@ import { expect } from '../../../test-setup'; -import { getContentType, getEditableContentType } from '../../../../src/model/events/content-types'; +import { getContentType, getEditableContentType, getCompatibleTypes } from '../../../../src/model/events/content-types'; describe('Content type parsing', () => { describe('getContentType', () => { @@ -81,4 +81,21 @@ describe('Content type parsing', () => { expect(ct).to.equal(undefined); }); }); + + describe('getCompatibleTypes', () => { + it('should detect standard base64 text', () => { + const cts = getCompatibleTypes('text', 'text/plain', Buffer.from('FWTkm2+ZvMo=', 'ascii')); + expect(cts).to.deep.equal(['text', 'base64', 'raw']); + }); + + it('should detect URL-safe (without padding) base64 text', () => { + const cts = getCompatibleTypes('text', 'text/plain', Buffer.from('FWTkm2-ZvMo', 'ascii')); + expect(cts).to.deep.equal(['text', 'base64', 'raw']); + }); + + it('should work even if first character is not ASCII', () => { + const cts = getCompatibleTypes('raw', 'application/octet-stream', Buffer.from('1f8d08', 'hex')); // GZIP magic bytes + expect(cts).to.deep.equal(['raw', 'text']); + }); + }); }); \ No newline at end of file From 3d713b3b8f482a96e12afac398dc63f8f5b65009 Mon Sep 17 00:00:00 2001 From: Etienne Maheux Date: Thu, 26 Sep 2024 19:56:36 +0200 Subject: [PATCH 2/3] Handle gRPC compressed payloads + headers are sent to formatters + improve Protobuf/gRPC tests coverage --- src/components/editor/content-viewer.tsx | 5 +- src/components/editor/monaco.ts | 2 +- src/components/send/sent-response-body.tsx | 5 +- src/components/view/http/http-body-card.tsx | 5 +- src/model/events/body-formatting.ts | 25 +-- src/model/events/content-types.ts | 32 ++-- 
src/services/ui-worker-api.ts | 7 +- src/services/ui-worker-formatters.ts | 59 +++---- src/services/ui-worker.ts | 4 +- src/util/protobuf.ts | 104 ++++++++++-- test/unit/model/http/content-types.spec.ts | 35 ++++ test/unit/util/protobuf.spec.ts | 167 ++++++++++++++++++-- 12 files changed, 352 insertions(+), 98 deletions(-) diff --git a/src/components/editor/content-viewer.tsx b/src/components/editor/content-viewer.tsx index b9ea19df..999825e6 100644 --- a/src/components/editor/content-viewer.tsx +++ b/src/components/editor/content-viewer.tsx @@ -5,6 +5,7 @@ import { observer } from 'mobx-react'; import { SchemaObject } from 'openapi3-ts'; import * as portals from 'react-reverse-portal'; +import { Headers } from '../../types'; import { styled } from '../../styles'; import { ObservablePromise, isObservablePromise } from '../../util/observable'; import { asError, unreachableCheck } from '../../util/error'; @@ -22,7 +23,7 @@ interface ContentViewerProps { children: Buffer | string; schema?: SchemaObject; expanded: boolean; - rawContentType?: string; + headers?: Headers; contentType: ViewableContentType; editorNode: portals.HtmlPortalNode; cache: Map; @@ -199,7 +200,7 @@ export class ContentViewer extends React.Component { return ; } diff --git a/src/components/editor/monaco.ts b/src/components/editor/monaco.ts index 2c933a64..b79f4183 100644 --- a/src/components/editor/monaco.ts +++ b/src/components/editor/monaco.ts @@ -62,7 +62,7 @@ async function loadMonacoEditor(retries = 5): Promise { id: 'protobuf-decoding-header', command: { id: '', // No actual command defined here - title: "Automatically decoded from raw Protobuf data", + title: "Automatically decoded from raw Protobuf/gRPC data", }, }, ], diff --git a/src/components/send/sent-response-body.tsx b/src/components/send/sent-response-body.tsx index c8daedf0..ad06d87e 100644 --- a/src/components/send/sent-response-body.tsx +++ b/src/components/send/sent-response-body.tsx @@ -81,7 +81,8 @@ export class SentResponseBodyCard extends React.Component; + render(content: Buffer, headers?: Headers): string | ObservablePromise; } type FormatComponentProps = { content: Buffer; - rawContentType: string | undefined; + headers?: Headers; }; type FormatComponent = React.ComponentType; @@ -35,8 +36,8 @@ export function isEditorFormatter(input: any): input is EditorFormatter { } const buildAsyncRenderer = (formatKey: WorkerFormatterKey) => - (input: Buffer) => observablePromise( - formatBufferAsync(input, formatKey) + (input: Buffer, headers?: Headers) => observablePromise( + formatBufferAsync(input, formatKey, headers) ); export const Formatters: { [key in ViewableContentType]: Formatter } = { @@ -44,8 +45,8 @@ export const Formatters: { [key in ViewableContentType]: Formatter } = { language: 'text', cacheKey: Symbol('raw'), isEditApplicable: false, - render: (input: Buffer) => { - if (input.byteLength < 2000) { + render: (input: Buffer, headers?: Headers) => { + if (input.byteLength < 2_000) { try { // For short-ish inputs, we return synchronously - conveniently this avoids // showing the loading spinner that churns the layout in short content cases. 
@@ -55,7 +56,7 @@ export const Formatters: { [key in ViewableContentType]: Formatter } = { } } else { return observablePromise( - formatBufferAsync(input, 'raw') + formatBufferAsync(input, 'raw', headers) ); } } @@ -64,7 +65,7 @@ export const Formatters: { [key in ViewableContentType]: Formatter } = { language: 'text', cacheKey: Symbol('text'), isEditApplicable: false, - render: (input: Buffer) => { + render: (input: Buffer, headers?: Headers) => { return bufferToString(input); } }, @@ -102,8 +103,8 @@ export const Formatters: { [key in ViewableContentType]: Formatter } = { language: 'json', cacheKey: Symbol('json'), isEditApplicable: true, - render: (input: Buffer) => { - if (input.byteLength < 10000) { + render: (input: Buffer, headers?: Headers) => { + if (input.byteLength < 10_000) { const inputAsString = bufferToString(input); try { @@ -111,7 +112,7 @@ export const Formatters: { [key in ViewableContentType]: Formatter } = { // showing the loading spinner that churns the layout in short content cases. return JSON.stringify( JSON.parse(inputAsString), - null, 2); + null, 2); // ^ Same logic as in UI-worker-formatter } catch (e) { // Fallback to showing the raw un-formatted JSON: @@ -119,7 +120,7 @@ export const Formatters: { [key in ViewableContentType]: Formatter } = { } } else { return observablePromise( - formatBufferAsync(input, 'json') + formatBufferAsync(input, 'json', headers) ); } } diff --git a/src/model/events/content-types.ts b/src/model/events/content-types.ts index 80d8c4ef..ef85bbd9 100644 --- a/src/model/events/content-types.ts +++ b/src/model/events/content-types.ts @@ -1,8 +1,11 @@ import * as _ from 'lodash'; -import { MessageBody } from '../../types'; + +import { Headers, MessageBody } from '../../types'; import { isProbablyProtobuf, - isValidProtobuf + isValidProtobuf, + isProbablyGrpcProto, + isValidGrpcProto, } from '../../util/protobuf'; // Simplify a mime type as much as we can, without throwing any errors @@ -21,7 +24,7 @@ export const getBaseContentType = (mimeType: string | undefined) => { return type + '/' + combinedSubTypes; } - // Otherwise, wr collect a list of types from most specific to most generic: [svg, xml] for image/svg+xml + // Otherwise, we collect a list of types from most specific to most generic: [svg, xml] for image/svg+xml // and then look through in order to see if there are any matches here: const subTypes = combinedSubTypes.split('+'); const possibleTypes = subTypes.map(st => type + '/' + st); @@ -112,6 +115,9 @@ const mimeTypeToContentTypeMap: { [mimeType: string]: ViewableContentType } = { 'application/x-protobuffer': 'protobuf', // Commonly seen in Google apps 'application/grpc+proto': 'grpc-proto', // Used in GRPC requests (protobuf but with special headers) + 'application/grpc+protobuf': 'grpc-proto', + 'application/grpc-proto': 'grpc-proto', + 'application/grpc-protobuf': 'grpc-proto', 'application/octet-stream': 'raw' } as const; @@ -169,7 +175,8 @@ function isValidURLSafeBase64Byte(byte: number) { export function getCompatibleTypes( contentType: ViewableContentType, rawContentType: string | undefined, - body: MessageBody | Buffer | undefined + body: MessageBody | Buffer | undefined, + headers?: Headers, ): ViewableContentType[] { let types = new Set([contentType]); @@ -190,15 +197,11 @@ export function getCompatibleTypes( types.add('xml'); } - if (!types.has('grpc-proto') && rawContentType === 'application/grpc') { - types.add('grpc-proto') - } - if ( body && - isProbablyProtobuf(body) && !types.has('protobuf') && 
!types.has('grpc-proto') && + isProbablyProtobuf(body) && // If it's probably unmarked protobuf, and it's a manageable size, try // parsing it just to check: (body.length < 100_000 && isValidProtobuf(body)) @@ -206,6 +209,17 @@ export function getCompatibleTypes( types.add('protobuf'); } + if ( + body && + !types.has('grpc-proto') && + isProbablyGrpcProto(body, headers ?? {}) && + // If it's probably unmarked gRPC, and it's a manageable size, try + // parsing it just to check: + (body.length < 100_000 && isValidGrpcProto(body, headers ?? {})) + ) { + types.add('grpc-proto'); + } + // SVGs can always be shown as XML if (rawContentType && rawContentType.startsWith('image/svg')) { types.add('xml'); diff --git a/src/services/ui-worker-api.ts b/src/services/ui-worker-api.ts index 5b9770af..e08ba6f4 100644 --- a/src/services/ui-worker-api.ts +++ b/src/services/ui-worker-api.ts @@ -21,7 +21,7 @@ import type { ParseCertResponse } from './ui-worker'; -import { Omit } from '../types'; +import { Headers, Omit } from '../types'; import type { ApiMetadata, ApiSpec } from '../model/api/api-interfaces'; import { WorkerFormatterKey } from './ui-worker-formatters'; @@ -149,10 +149,11 @@ export async function parseCert(buffer: ArrayBuffer) { })).result; } -export async function formatBufferAsync(buffer: ArrayBuffer, format: WorkerFormatterKey) { +export async function formatBufferAsync(buffer: ArrayBuffer, format: WorkerFormatterKey, headers?: Headers) { return (await callApi({ type: 'format', buffer, - format + format, + headers, })).formatted; } \ No newline at end of file diff --git a/src/services/ui-worker-formatters.ts b/src/services/ui-worker-formatters.ts index 67fef007..dfb360c3 100644 --- a/src/services/ui-worker-formatters.ts +++ b/src/services/ui-worker-formatters.ts @@ -5,6 +5,7 @@ import { } from 'js-beautify/js/lib/beautifier'; import * as beautifyXml from 'xml-beautifier'; +import { Headers } from '../types'; import { bufferToHex, bufferToString, getReadableSize } from '../util/buffer'; import { parseRawProtobuf, extractProtobufFromGrpc } from '../util/protobuf'; @@ -13,10 +14,25 @@ const FIVE_MB = 1024 * 1024 * 5; export type WorkerFormatterKey = keyof typeof WorkerFormatters; -export function formatBuffer(buffer: ArrayBuffer, format: WorkerFormatterKey): string { - return WorkerFormatters[format](Buffer.from(buffer)); +export function formatBuffer(buffer: ArrayBuffer, format: WorkerFormatterKey, headers?: Headers): string { + return WorkerFormatters[format](Buffer.from(buffer), headers); } +const prettyProtobufView = (data: any) => JSON.stringify(data, (_key, value) => { + // Buffers have toJSON defined, so arrive here in JSONified form: + if (value.type === 'Buffer' && Array.isArray(value.data)) { + const buffer = Buffer.from(value.data); + + return { + "Type": `Buffer (${getReadableSize(buffer)})`, + "As string": bufferToString(buffer, 'detect-encoding'), + "As hex": bufferToHex(buffer) + } + } else { + return value; + } +}, 2); + // A subset of all possible formatters (those allowed by body-formatting), which require // non-trivial processing, and therefore need to be processed async. 
const WorkerFormatters = { @@ -76,44 +92,15 @@ const WorkerFormatters = { }); }, protobuf: (content: Buffer) => { - const data = parseRawProtobuf(content, { - prefix: '' - }); - - return JSON.stringify(data, (_key, value) => { - // Buffers have toJSON defined, so arrive here in JSONified form: - if (value.type === 'Buffer' && Array.isArray(value.data)) { - const buffer = Buffer.from(value.data); - - return { - "Type": `Buffer (${getReadableSize(buffer)})`, - "As string": bufferToString(buffer, 'detect-encoding'), - "As hex": bufferToHex(buffer) - } - } else { - return value; - } - }, 2); + const data = parseRawProtobuf(content, { prefix: '' }); + return prettyProtobufView(data); }, - 'grpc-proto': (content: Buffer) => { - const protobufMessages = extractProtobufFromGrpc(content); + 'grpc-proto': (content: Buffer, headers?: Headers) => { + const protobufMessages = extractProtobufFromGrpc(content, headers ?? {}); let data = protobufMessages.map((msg) => parseRawProtobuf(msg, { prefix: '' })); if (data.length === 1) data = data[0]; - return JSON.stringify(data, (_key, value) => { - // Buffers have toJSON defined, so arrive here in JSONified form: - if (value.type === 'Buffer' && Array.isArray(value.data)) { - const buffer = Buffer.from(value.data); - - return { - "Type": `Buffer (${getReadableSize(buffer)})`, - "As string": bufferToString(buffer, 'detect-encoding'), - "As hex": bufferToHex(buffer) - } - } else { - return value; - } - }, 2); + return prettyProtobufView(data); } } as const; \ No newline at end of file diff --git a/src/services/ui-worker.ts b/src/services/ui-worker.ts index 631caba6..817fe814 100644 --- a/src/services/ui-worker.ts +++ b/src/services/ui-worker.ts @@ -13,6 +13,7 @@ import { } from 'http-encoding'; import { OpenAPIObject } from 'openapi-directory'; +import { Headers } from '../types'; import { ApiMetadata, ApiSpec } from '../model/api/api-interfaces'; import { buildOpenApiMetadata, buildOpenRpcMetadata } from '../model/api/build-api-metadata'; import { parseCert, ParsedCertificate, validatePKCS12, ValidationResult } from '../model/crypto'; @@ -91,6 +92,7 @@ export interface FormatRequest extends Message { type: 'format'; buffer: ArrayBuffer; format: WorkerFormatterKey; + headers?: Headers; } export interface FormatResponse extends Message { @@ -217,7 +219,7 @@ ctx.addEventListener('message', async (event: { data: BackgroundRequest }) => { break; case 'format': - const formatted = formatBuffer(event.data.buffer, event.data.format); + const formatted = formatBuffer(event.data.buffer, event.data.format, event.data.headers); ctx.postMessage({ id: event.data.id, formatted }); break; diff --git a/src/util/protobuf.ts b/src/util/protobuf.ts index aae5347d..a0a5df7c 100644 --- a/src/util/protobuf.ts +++ b/src/util/protobuf.ts @@ -1,53 +1,113 @@ import parseRawProto from 'rawprotoparse'; +import { gunzipSync, inflateSync } from 'zlib'; + +import { Headers } from '../types'; +import { lastHeader } from './headers'; export function isProbablyProtobuf(input: Uint8Array) { - // Protobuf data starts with a varint, consisting of a field - // number (1 - 2^29-1) and a field type (0, 1, 2, 3, 4, 5) + // Protobuf data starts with a varint, consisting of a + // field number in [1, 2^29[ and a field type in [0, 5]*. // Unfortunately, that matches a very wide set of values, - // including things like '<' and '{' that are widely used - // elsewhere. + // including things like '<', '[' and '{' that are widely + // used in other contexts. 
+ // * Hopefully, field types 3 & 4 have been deprecated for a while, + // we thus consider them as invalid for this quick inference. // To handle that, we're more strict here, and we assume that - // field 1 will be first (very common, but not guaranteed). + // first field is tiny (<= 3) (very common, but not guaranteed). // This is a best-efforts check for messages with no other // indicators (no matching content-type) so that's OK. - // This implies a first byte from 08 to 0D, which has no obvious + // This implies a first byte from 08 to 1D, which is not + // in range of printable ASCII characters and has no obvious // conflicts in https://en.wikipedia.org/wiki/List_of_file_signatures // but does notably conflict with tab/cr/lf. // That makes this good as a very quick first check, but confirming // actual parsing is required to check more thoroughly. + if (input.length < 2) { + return false; + } - const fieldNumber = input[0] >>> 3; + const fieldNumberTrunc = input[0] >>> 3; const fieldType = input[0] & 0b111; - return fieldNumber === 1 && - fieldType >= 1 && - fieldType <= 6; + return fieldNumberTrunc >= 1 && + fieldNumberTrunc <= 3 && + [0, 1, 2, 5].includes(fieldType); } export const parseRawProtobuf = parseRawProto; // GRPC message structure: +// Ref: https://github.com/grpc/grpc/blob/master/doc/PROTOCOL-HTTP2.md +// // The repeated sequence of Length-Prefixed-Message items is delivered in DATA frames // Length-Prefixed-Message → Compressed-Flag Message-Length Message // Compressed-Flag → 0 / 1 ; encoded as 1 byte unsigned integer // Message-Length → {length of Message} ; encoded as 4 byte unsigned integer (big endian) // Message → *{binary octet} -export const extractProtobufFromGrpc = (input: Buffer) => { - const protobufMessasges: Buffer[] = []; +// +// A Compressed-Flag value of 1 indicates that the binary octet sequence of Message is +// compressed using the mechanism declared by the Message-Encoding header. +// A value of 0 indicates that no encoding of Message bytes has occurred. +// If the Message-Encoding header is omitted then the Compressed-Flag must be 0. +export const extractProtobufFromGrpc = (input: Buffer, headers: Headers) => { + const grpcEncoding = lastHeader(headers['grpc-encoding'] ?? 'identity').toLocaleLowerCase(); + const grpcDecoder = grpcEncoding == 'gzip' ? gunzipSync : grpcEncoding == 'deflate' ? 
inflateSync : undefined; + const protobufMessages: Buffer[] = []; + + // useful indices for debugging + let offset = 0; + let msgIndex = 0; while (input.length > 0) { - if (input.readInt8() != 0) { - throw new Error("Compressed gRPC messages not yet supported") + const errorPrefix = `gRPC message #${msgIndex} @${offset}: ` + const compressionFlag = input.readUInt8(); + const length = input.readUInt32BE(1); + let message = input.slice(5, 5 + length); + if (message.length != length) { + throw new Error(`${errorPrefix}length of message is corrupted`); } - const length = input.readInt32BE(1); - protobufMessasges.push(input.slice(5, 5 + length)); + switch (compressionFlag) { + case 0: // may happen even if grpc-encoding != identity according to specs + break; + case 1: + if (!grpcDecoder) { + throw new Error(`${errorPrefix}not expected to be compressed`); + } + try { + message = grpcDecoder(message); + } catch (err) { + throw new Error(`${errorPrefix}failed decompression (from ${grpcEncoding})`); + } + break; + default: + throw new Error(`${errorPrefix}unsupported compression flag (0x${compressionFlag.toString(16).padStart(2, '0')})`); + } + + protobufMessages.push(message); input = input.subarray(5 + length); + offset += 5 + length; + msgIndex++; } - return protobufMessasges; + return protobufMessages; +} + +export const isProbablyGrpcProto = (input: Buffer, headers: Headers) => { + if (input.byteLength < 7) { + return false; + } + const compressionFlag = input.readUInt8(); + const length = input.readUInt32BE(1); + const firstMessage = input.slice(5, 5 + length); + return length >= 2 && // at least two bytes for Protobuf message (tag & value) + firstMessage.length == length && + ( + (compressionFlag == 0 && isProbablyProtobuf(firstMessage)) || + (compressionFlag == 1 && Object.keys(headers).includes('grpc-encoding')) + ) } export const isValidProtobuf = (input: Uint8Array) => { @@ -57,4 +117,14 @@ export const isValidProtobuf = (input: Uint8Array) => { } catch (e) { return false; } +} + +export const isValidGrpcProto = (input: Buffer, headers: Headers) => { + try { + const protobufMessages = extractProtobufFromGrpc(input, headers); + protobufMessages.forEach((msg) => parseRawProtobuf(msg)); + return true; + } catch (e) { + return false; + } } \ No newline at end of file diff --git a/test/unit/model/http/content-types.spec.ts b/test/unit/model/http/content-types.spec.ts index 3d098665..eec28b53 100644 --- a/test/unit/model/http/content-types.spec.ts +++ b/test/unit/model/http/content-types.spec.ts @@ -59,6 +59,21 @@ describe('Content type parsing', () => { expect(ct).to.equal('grpc-proto'); }); + it('should render application/grpc+protobuf as protobuf grpc', () => { + const ct = getContentType('application/grpc+protobuf'); + expect(ct).to.equal('grpc-proto'); + }); + + it('should render application/grpc-proto as protobuf grpc', () => { + const ct = getContentType('application/grpc-proto'); + expect(ct).to.equal('grpc-proto'); + }); + + it('should render application/grpc-protobuf as protobuf grpc', () => { + const ct = getContentType('application/grpc-protobuf'); + expect(ct).to.equal('grpc-proto'); + }); + it('should render application/grpc+json as JSON', () => { const ct = getContentType('application/grpc+json'); expect(ct).to.equal('json'); @@ -97,5 +112,25 @@ describe('Content type parsing', () => { const cts = getCompatibleTypes('raw', 'application/octet-stream', Buffer.from('1f8d08', 'hex')); // GZIP magic bytes expect(cts).to.deep.equal(['raw', 'text']); }); + + it('should flag 
application/grpc as compatible with [grpc-proto,text,raw]', () => { + const cts = getCompatibleTypes('grpc-proto', 'application/grpc', undefined); + expect(cts).to.deep.equal(['grpc-proto', 'text', 'raw']); + }); + + it('should flag application/grpc+proto as compatible with [grpc-proto,text,raw]', () => { + const cts = getCompatibleTypes('grpc-proto', 'application/grpc+proto', undefined); + expect(cts).to.deep.equal(['grpc-proto', 'text', 'raw']); + }); + + it('should flag application/grpc+json as compatible with [grpc-proto,text,raw]', () => { + const cts = getCompatibleTypes('json', 'application/grpc+json', undefined); + expect(cts).to.deep.equal(['json', 'text', 'raw']); + }); + + it('should detect undeclared grpc+proto', () => { + const cts = getCompatibleTypes('raw', 'application/octet-stream', Buffer.from('AAAAAAIIAQ==', 'base64')); + expect(cts).to.deep.equal(['raw', 'grpc-proto', 'text']); + }); }); }); \ No newline at end of file diff --git a/test/unit/util/protobuf.spec.ts b/test/unit/util/protobuf.spec.ts index ab032c62..ea50673c 100644 --- a/test/unit/util/protobuf.spec.ts +++ b/test/unit/util/protobuf.spec.ts @@ -1,6 +1,43 @@ import { expect } from "../../test-setup"; -import { isProbablyProtobuf } from "../../../src/util/protobuf"; +import { Headers } from '../../../src/types'; +import { isProbablyProtobuf, parseRawProtobuf, extractProtobufFromGrpc } from "../../../src/util/protobuf"; + +const bufferFromHex = (hex: string) => Buffer.from(hex.replace(/:/g, ''), 'hex'); +const uint32HexLengthFromHexColon = (hex: string) => ((hex.length + 1) / 3).toString(16).padStart(8, '0'); // no overflow check + +const _M1 = `syntax = "proto2"; +message M1 { + optional string msg = 1; +}`; + +const m1 = '0a:0b:48:65:6c:6c:6f:20:57:6f:72:6c:64'; +const m1Js = { "1": "Hello World" }; + +const mLastFieldNb = `fa:ff:ff:ff:0f:${m1.slice(3)}`; // #536870911(=2^29-1): "Hello World" + +const m1b = '0a:09:46:72:6f:6d:20:67:52:50:43'; +const m1bJs = { "1": "From gRPC" }; + +const m1Deflate = '78:9c:05:80:31:09:00:00:08:04:77:2d:61:1c:1b:58:40:b7:83:07:fb:0f:4f:64:1f:a8:46:cf:1a:19:13:04:32'; +const m1bDeflate = '78:5e:e3:e2:74:2b:ca:cf:55:48:0f:0a:70:06:00:10:85:03:14'; + +const _M2 = `syntax = "proto3"; +message M2 { + uint64 id = 3; + string name = 42; + double timestamp = 99; +}`; + +const m2 = '18:7b:d2:02:19:48:65:6c:6c:6f:20:57:6f:72:6c:64:20:77:69:74:68:20:55:54:46:38:20:e2:86:90:99:06:b9:c7:ad:df:47:bd:d9:41'; +const m2Js = { + "3": 123, + "42": "Hello World with UTF8 ←", + "99": bufferFromHex(m2.slice(-8 * 3 + 1)), // 1727340414.715315 as double ( often interpreted as fixed64 instead of double without schema) +} + +// Fixed Huffman coding (with checksum) +const m2Gzip = '1f:8b:08:02:88:94:f5:66:00:ff:f2:8f:93:a8:be:c4:24:e9:91:9a:93:93:af:10:9e:5f:94:93:a2:50:9e:59:92:a1:10:1a:e2:66:a1:f0:a8:6d:c2:4c:b6:9d:c7:d7:de:77:df:7b:d3:11:00:7f:e5:0c:b7:28:00:00:00'; describe("isProbablyProtobuf", () => { @@ -10,12 +47,18 @@ describe("isProbablyProtobuf", () => { ).to.equal(false); }); - it("should not recognize JSON as Protobuf", () => { + it("should not recognize JSON dict as Protobuf", () => { expect( isProbablyProtobuf(Buffer.from('{}', 'utf8')) ).to.equal(false); }); + it("should not recognize JSON array as Protobuf", () => { + expect( + isProbablyProtobuf(Buffer.from('[]', 'utf8')) + ).to.equal(false); + }); + it("should not recognize HTML as Protobuf", () => { expect( isProbablyProtobuf(Buffer.from('', 'utf8')) @@ -24,22 +67,120 @@ describe("isProbablyProtobuf", () => { it("should recognize 
basic protobuf", () => { expect( - isProbablyProtobuf(Buffer.from( - // Field 1 - string - Hello World - '0a 0b 48 65 6c 6c 6f 20 57 6f 72 6c 64', - 'hex' - )) + isProbablyProtobuf(bufferFromHex(m1)) ).to.equal(true); }); - it("should not recognize protobuf with invalid field numbers", () => { + it("should recognize more complex protobuf", () => { expect( - isProbablyProtobuf(Buffer.from( - // Field 2^28 (invalid) - 'fa ff ff ff 08 0b 48 65 6c 6c 6f 20 77 6f 72 6c 64', - 'hex' - )) + isProbablyProtobuf(bufferFromHex(m2)) + ).to.equal(true); + }); + + it("should not recognize protobuf with first field number too high", () => { + expect( + isProbablyProtobuf(bufferFromHex(mLastFieldNb)) ).to.equal(false); }); +}); + +const GRPCFixtures: { [key: string]: [string, Headers, any[]] } = { + // No compression + "should handle simplest gRPC payload (basic mono-message, uncompressed)": [ + `00:${uint32HexLengthFromHexColon(m1)}:${m1}`, + { 'grpc-encoding': 'identity' }, + [m1Js], + ], + "should handle usual gRPC payload (more complex mono-message, uncompressed without explicit encoding)": [ + `00:${uint32HexLengthFromHexColon(m2)}:${m2}`, + {}, // no grpc-encoding (identity by default) + [m2Js], + ], + "should handle multiple uncompressed gRPC messages": [ + `00:${uint32HexLengthFromHexColon(m1)}:${m1}:00:${uint32HexLengthFromHexColon(m1b)}:${m1b}`, // 2 uncompressed messages + { 'grpc-encoding': 'identity' }, + [m1Js, m1bJs], + ], + // Compressed + "should handle basic compressed (with deflate) gRPC payload": [ + `01:${uint32HexLengthFromHexColon(m1Deflate)}:${m1Deflate}`, + { 'grpc-encoding': 'deflate' }, + [m1Js], + ], + "should handle basic compressed (with gzip) gRPC payload": [ + `01:${uint32HexLengthFromHexColon(m2Gzip)}:${m2Gzip}`, + { 'grpc-encoding': 'gzip' }, + [m2Js], + ], + "should handle multiple compressed gRPC messages": [ + `00:${uint32HexLengthFromHexColon(m1)}:${m1}:01:${uint32HexLengthFromHexColon(m1bDeflate)}:${m1bDeflate}`, // per-message compression is optional + { 'grpc-encoding': 'deflate' }, + [m1Js, m1bJs], + ], + +}; + +describe("extractProtobufFromGrpc", () => { + + Object.entries(GRPCFixtures).forEach(([testName, [hexGrpc, headers, expectedMsgs]]) => it(testName, () => { + const protoMsgs = extractProtobufFromGrpc(bufferFromHex(hexGrpc), headers).map((msg) => parseRawProtobuf(msg, { prefix: '' })); + expect(protoMsgs).to.deep.equal(expectedMsgs); + })); + + it("should fail for compression flag != {0,1}", () => { + const f = extractProtobufFromGrpc.bind(null, bufferFromHex(`02:${uint32HexLengthFromHexColon(m1)}:${m1}`), {}); + expect(f).to.throw(Error); + }); + + it("should reject compressed payload when grpc-encoding is identity", () => { + const f = extractProtobufFromGrpc.bind( + null, + bufferFromHex(`01:${uint32HexLengthFromHexColon(m1)}:${m1}`), + { 'grpc-encoding': 'identity' }, + ); + expect(f).to.throw(Error); + }); + + it("should reject compressed payload when grpc-encoding is not provided", () => { + const f = extractProtobufFromGrpc.bind(null, bufferFromHex(`01:${uint32HexLengthFromHexColon(m1)}:${m1}`), {}); + expect(f).to.throw(Error); + }); + + it("should fail for wrongly declared grpc-encoding (gzip)", () => { + const f = extractProtobufFromGrpc.bind( + null, + bufferFromHex(`01:${uint32HexLengthFromHexColon(m1Deflate)}:${m1Deflate}`), + { 'grpc-encoding': 'gzip' }, + ); + expect(f).to.throw(Error); + }); + + it("should fail for wrongly declared grpc-encoding (deflate)", () => { + const f = extractProtobufFromGrpc.bind( + null, + 
bufferFromHex(`01:${uint32HexLengthFromHexColon(m2Gzip)}:${m2Gzip}`), + { 'grpc-encoding': 'deflate' }, + ); + expect(f).to.throw(Error); + }); + + it("should fail for corrupted deflate payload", () => { + const f = extractProtobufFromGrpc.bind( + null, + bufferFromHex(`01:${uint32HexLengthFromHexColon(m1Deflate.slice(0, -6))}:${m1Deflate}`), + { 'grpc-encoding': 'deflate' }, + ); + expect(f).to.throw(Error); + }); + + it("should fail for corrupted gzip payload", () => { + const f = extractProtobufFromGrpc.bind( + null, + bufferFromHex(`01:${uint32HexLengthFromHexColon(m2Gzip.slice(0, -6))}:${m2Gzip}`), + { 'grpc-encoding': 'gzip' }, + ); + expect(f).to.throw(Error); + }); + }); \ No newline at end of file From 2fdcab7e23c1ab82e130b135839e4c65bb771f0a Mon Sep 17 00:00:00 2001 From: Tim Perry Date: Wed, 2 Oct 2024 18:06:07 +0200 Subject: [PATCH 3/3] Add some last small fixes/tweaks for gRPC compression change --- src/components/editor/content-viewer.tsx | 3 ++- .../intercept/config/android-device-config.tsx | 5 ++++- src/model/events/body-formatting.ts | 8 +++++--- src/model/events/content-types.ts | 12 +++++++----- src/model/http/har.ts | 10 +++++----- src/services/ui-worker-formatters.ts | 3 +-- 6 files changed, 24 insertions(+), 17 deletions(-) diff --git a/src/components/editor/content-viewer.tsx b/src/components/editor/content-viewer.tsx index 999825e6..69f7cea7 100644 --- a/src/components/editor/content-viewer.tsx +++ b/src/components/editor/content-viewer.tsx @@ -10,6 +10,7 @@ import { styled } from '../../styles'; import { ObservablePromise, isObservablePromise } from '../../util/observable'; import { asError, unreachableCheck } from '../../util/error'; import { stringToBuffer } from '../../util/buffer'; +import { lastHeader } from '../../util/headers'; import { ViewableContentType } from '../../model/events/content-types'; import { Formatters, isEditorFormatter } from '../../model/events/body-formatting'; @@ -200,7 +201,7 @@ export class ContentViewer extends React.Component { return ; } diff --git a/src/components/intercept/config/android-device-config.tsx b/src/components/intercept/config/android-device-config.tsx index a1f681c7..605d9b22 100644 --- a/src/components/intercept/config/android-device-config.tsx +++ b/src/components/intercept/config/android-device-config.tsx @@ -64,7 +64,10 @@ const Spacer = styled.div` `; function urlSafeBase64(content: string) { - return stringToBuffer(content).toString('base64url'); + return stringToBuffer(content) + .toString('base64') + .replace(/\+/g, '-') + .replace(/\//g, '_'); } function getConfigRequestIds(eventsStore: EventsStore) { diff --git a/src/model/events/body-formatting.ts b/src/model/events/body-formatting.ts index ad092238..147673be 100644 --- a/src/model/events/body-formatting.ts +++ b/src/model/events/body-formatting.ts @@ -19,7 +19,7 @@ export interface EditorFormatter { type FormatComponentProps = { content: Buffer; - headers?: Headers; + rawContentType: string | undefined; }; type FormatComponent = React.ComponentType; @@ -65,7 +65,7 @@ export const Formatters: { [key in ViewableContentType]: Formatter } = { language: 'text', cacheKey: Symbol('text'), isEditApplicable: false, - render: (input: Buffer, headers?: Headers) => { + render: (input: Buffer) => { return bufferToString(input); } }, @@ -112,7 +112,9 @@ export const Formatters: { [key in ViewableContentType]: Formatter } = { // showing the loading spinner that churns the layout in short content cases. 
return JSON.stringify( JSON.parse(inputAsString), - null, 2); + null, + 2 + ); // ^ Same logic as in UI-worker-formatter } catch (e) { // Fallback to showing the raw un-formatted JSON: diff --git a/src/model/events/content-types.ts b/src/model/events/content-types.ts index ef85bbd9..d632be1e 100644 --- a/src/model/events/content-types.ts +++ b/src/model/events/content-types.ts @@ -153,7 +153,7 @@ export function getDefaultMimeType(contentType: ViewableContentType): string { return _.findKey(mimeTypeToContentTypeMap, (c) => c === contentType)!; } -function isValidAlphaNumOrSpace(byte: number) { +function isAlphaNumOrEquals(byte: number) { return (byte >= 65 && byte <= 90) || // A-Z (byte >= 97 && byte <= 122) || // a-z (byte >= 48 && byte <= 57) || // 0-9 @@ -162,14 +162,16 @@ function isValidAlphaNumOrSpace(byte: number) { function isValidStandardBase64Byte(byte: number) { // + / (standard) - return byte === 43 || byte === 47 - || isValidAlphaNumOrSpace(byte); + return byte === 43 || + byte === 47 || + isAlphaNumOrEquals(byte); } function isValidURLSafeBase64Byte(byte: number) { // - _ (URL-safe version) - return byte === 45 || byte === 95 - || isValidAlphaNumOrSpace(byte); + return byte === 45 || + byte === 95 || + isAlphaNumOrEquals(byte); } export function getCompatibleTypes( diff --git a/src/model/http/har.ts b/src/model/http/har.ts index d159bad3..c7c52c04 100644 --- a/src/model/http/har.ts +++ b/src/model/http/har.ts @@ -57,9 +57,9 @@ export type RequestContentData = { export interface ExtendedHarRequest extends HarFormat.Request { _requestBodyStatus?: - | 'discarded:too-large' - | 'discarded:not-representable' // to indicate that extended field `_content` is populated with base64 `postData` - | 'discarded:not-decodable'; + | 'discarded:too-large' + | 'discarded:not-representable' // to indicate that extended field `_content` is populated with base64 `postData` + | 'discarded:not-decodable'; _content?: RequestContentData; _trailers?: HarFormat.Header[]; } @@ -435,10 +435,10 @@ function generateHarWebSocketMessage( return { // Note that msg.direction is from the perspective of Mockttp, not the client. type: message.direction === 'sent' - ? 'receive' + ? 'receive' : message.direction === 'received' ? 'send' - : unreachableCheck(message.direction), + : unreachableCheck(message.direction), opcode: message.isBinary ? 2 : 1, data: message.isBinary diff --git a/src/services/ui-worker-formatters.ts b/src/services/ui-worker-formatters.ts index dfb360c3..a6cd9390 100644 --- a/src/services/ui-worker-formatters.ts +++ b/src/services/ui-worker-formatters.ts @@ -56,8 +56,7 @@ const WorkerFormatters = { }, base64: (content: Buffer) => { const b64 = content.toString('ascii'); - const encoding = b64.match(/[-_]/) ? 'base64url' : 'base64'; - return Buffer.from(b64, encoding).toString('utf8'); + return Buffer.from(b64, 'base64').toString('utf8'); }, markdown: (content: Buffer) => { return content.toString('utf8');
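
A note on the final src/services/ui-worker-formatters.ts hunk: dropping the explicit 'base64url' branch is safe because Node's 'base64' decoder also accepts the RFC 4648 §5 URL-safe alphabet and tolerates omitted padding. A minimal standalone sketch (not part of the patches above, and relying only on that documented Buffer behaviour), reusing the sample strings from the new content-types tests:

    // Node's 'base64' decoding accepts both the standard and URL-safe alphabets,
    // so a single decode branch covers both cases:
    const standard = Buffer.from('FWTkm2+ZvMo=', 'base64');
    const urlSafe = Buffer.from('FWTkm2-ZvMo', 'base64'); // '-'/'_' and missing padding are accepted
    console.log(standard.equals(urlSafe)); // true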