diff --git a/.github/ISSUE_TEMPLATE/http_parser.md b/.github/ISSUE_TEMPLATE/http_parser.md new file mode 100644 index 0000000000..cf34cdc533 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/http_parser.md @@ -0,0 +1,5 @@ +--- +name: "package:http_parser" +about: "Create a bug or file a feature request against package:http_parser." +labels: "package:http_parser" +--- \ No newline at end of file diff --git a/.github/labeler.yml b/.github/labeler.yml index 1fc30162b4..3add1c1717 100644 --- a/.github/labeler.yml +++ b/.github/labeler.yml @@ -20,6 +20,10 @@ - changed-files: - any-glob-to-any-file: 'pkgs/http2/**' +'package:http_parser': + - changed-files: + - any-glob-to-any-file: 'pkgs/http_parser/**' + 'package:http_client_conformance_tests': - changed-files: - any-glob-to-any-file: 'pkgs/http_client_conformance_tests/**' diff --git a/.github/workflows/http_parser.yaml b/.github/workflows/http_parser.yaml new file mode 100644 index 0000000000..b4dd271d31 --- /dev/null +++ b/.github/workflows/http_parser.yaml @@ -0,0 +1,74 @@ +name: package:http_parser + +on: + push: + branches: + - master + paths: + - '.github/workflows/http_parser.yaml' + - 'pkgs/http_parser/**' + pull_request: + paths: + - '.github/workflows/http_parser.yaml' + - 'pkgs/http_parser/**' + schedule: + - cron: "0 0 * * 0" + +env: + PUB_ENVIRONMENT: bot.github + +defaults: + run: + working-directory: pkgs/http_parser/ + +permissions: read-all + +jobs: + # Check code formatting and static analysis on a single OS (linux) + # against Dart dev. + analyze: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + sdk: [dev] + steps: + - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 + - uses: dart-lang/setup-dart@0a8a0fc875eb934c15d08629302413c671d3f672 + with: + sdk: ${{ matrix.sdk }} + - id: install + name: Install dependencies + run: dart pub get + - name: Check formatting + run: dart format --output=none --set-exit-if-changed . + if: always() && steps.install.outcome == 'success' + - name: Analyze code + run: dart analyze --fatal-infos + if: always() && steps.install.outcome == 'success' + + test: + needs: analyze + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest] + sdk: [3.4, dev] + steps: + - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 + - uses: dart-lang/setup-dart@0a8a0fc875eb934c15d08629302413c671d3f672 + with: + sdk: ${{ matrix.sdk }} + - id: install + name: Install dependencies + run: dart pub get + - name: Run VM tests + run: dart test --platform vm + if: always() && steps.install.outcome == 'success' + - name: Run Chrome tests + run: dart test --platform chrome + if: always() && steps.install.outcome == 'success' + - name: Run Chrome tests - wasm + run: dart test --platform chrome --compiler dart2wasm + if: always() && steps.install.outcome == 'success' diff --git a/README.md b/README.md index 0c39b58811..31fc6500b1 100644 --- a/README.md +++ b/README.md @@ -16,6 +16,7 @@ and the browser. | [http](pkgs/http/) | A composable, multi-platform, Future-based API for HTTP requests. | [![pub package](https://img.shields.io/pub/v/http.svg)](https://pub.dev/packages/http) | | [http2](pkgs/http2/) | A HTTP/2 implementation in Dart. | [![pub package](https://img.shields.io/pub/v/http2.svg)](https://pub.dev/packages/http2) | | [http_client_conformance_tests](pkgs/http_client_conformance_tests/) | A library that tests whether implementations of package:http's `Client` class behave as expected. 
| | +| [http_parser](pkgs/http_parser/) | A platform-independent package for parsing and serializing HTTP formats. | [![pub package](https://img.shields.io/pub/v/http_parser.svg)](https://pub.dev/packages/http_parser) | | [http_profile](pkgs/http_profile/) | A library used by HTTP client authors to integrate with the DevTools Network View. | [![pub package](https://img.shields.io/pub/v/http_profile.svg)](https://pub.dev/packages/http_profile) | | [ok_http](pkgs/ok_http/) | An Android Flutter plugin that provides access to the [OkHttp](https://square.github.io/okhttp/) HTTP client and the OkHttp [WebSocket](https://square.github.io/okhttp/5.x/okhttp/okhttp3/-web-socket/index.html) API. | [![pub package](https://img.shields.io/pub/v/ok_http.svg)](https://pub.dev/packages/ok_http) | | [web_socket](pkgs/web_socket/) | Any easy-to-use library for communicating with WebSockets that has multiple implementations. | [![pub package](https://img.shields.io/pub/v/web_socket.svg)](https://pub.dev/packages/web_socket) | diff --git a/pkgs/http_parser/.gitignore b/pkgs/http_parser/.gitignore new file mode 100644 index 0000000000..ec8eae3f1c --- /dev/null +++ b/pkgs/http_parser/.gitignore @@ -0,0 +1,4 @@ +# Don’t commit the following directories created by pub. +.dart_tool/ +.packages +pubspec.lock diff --git a/pkgs/http_parser/CHANGELOG.md b/pkgs/http_parser/CHANGELOG.md new file mode 100644 index 0000000000..5c56cf7113 --- /dev/null +++ b/pkgs/http_parser/CHANGELOG.md @@ -0,0 +1,146 @@ +## 4.1.1 + +* Move to `dart-lang/http` monorepo. + +## 4.1.0 + +* `CaseInsensitiveMap`: added constructor `fromEntries`. +* Require `package:collection` `^1.19.0` +* Require Dart `^3.4.0` + +## 4.0.2 + +* Remove `package:charcode` from dev_dependencies. + +## 4.0.1 + +* Remove dependency on `package:charcode`. + +## 4.0.0 + +* Stable null safety stable release. + +## 4.0.0-nullsafety + +* Migrate to null safety. + +## 3.1.4 + +* Fixed lints affecting package health score. +* Added an example. + +## 3.1.3 + +* Set max SDK version to `<3.0.0`, and adjust other dependencies. + +## 3.1.2 + +* Require Dart SDK 2.0.0-dev.17.0 or greater. + +* A number of strong-mode fixes. + +## 3.1.1 + +* Fix a logic bug in the `chunkedCoding` codec. It had been producing invalid + output and rejecting valid input. + +## 3.1.0 + +* Add `chunkedCoding`, a `Codec` that supports encoding and decoding the + [chunked transfer coding][]. + +[chunked transfer coding]: https://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.6.1 + +## 3.0.2 + +* Support `string_scanner` 1.0.0. + +## 3.0.1 + +* Remove unnecessary dependencies. + +## 3.0.0 + +* All deprecated APIs have been removed. No new APIs have been added. Packages + that would use 3.0.0 as a lower bound should use 2.2.0 instead—for example, + `http_parser: ">=2.2.0 <4.0.0"`. + +* Fix all strong-mode warnings. + +## 2.2.1 + +* Add support for `crypto` 1.0.0. + +## 2.2.0 + +* `WebSocketChannel` has been moved to + [the `web_socket_channel` package][web_socket_channel]. The implementation + here is now deprecated. + +[web_socket_channel]: https://pub.dev/packages/web_socket_channel + +## 2.1.0 + +* Added `WebSocketChannel`, an implementation of `StreamChannel` that's backed + by a `WebSocket`. + +* Deprecated `CompatibleWebSocket` in favor of `WebSocketChannel`. + +## 2.0.0 + +* Removed the `DataUri` class. It's redundant with the `Uri.data` getter that's + coming in Dart 1.14, and the `DataUri.data` field in particular was an invalid + override of that field. 
+ +## 1.1.0 + +* The MIME spec says that media types and their parameter names are + case-insensitive. Accordingly, `MediaType` now uses a case-insensitive map for + its parameters and its `type` and `subtype` fields are now always lowercase. + +## 1.0.0 + +This is 1.0.0 because the API is stable—there are no breaking changes. + +* Added an `AuthenticationChallenge` class for parsing and representing the + value of `WWW-Authenticate` and related headers. + +* Added a `CaseInsensitiveMap` class for representing case-insensitive HTTP + values. + +## 0.0.2+8 + +* Bring in the latest `dart:io` WebSocket code. + +## 0.0.2+7 + +* Add more detail to the readme. + +## 0.0.2+6 + +* Updated homepage URL. + +## 0.0.2+5 + +* Widen the version constraint on the `collection` package. + +## 0.0.2+4 + +* Widen the `string_scanner` version constraint. + +## 0.0.2+3 + +* Fix a library name conflict. + +## 0.0.2+2 + +* Fixes for HTTP date formatting. + +## 0.0.2+1 + +* Minor code refactoring. + +## 0.0.2 + +* Added `CompatibleWebSocket`, for platform- and API-independent support for the + WebSocket API. diff --git a/pkgs/http_parser/LICENSE b/pkgs/http_parser/LICENSE new file mode 100644 index 0000000000..000cd7beca --- /dev/null +++ b/pkgs/http_parser/LICENSE @@ -0,0 +1,27 @@ +Copyright 2014, the Dart project authors. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of Google LLC nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/pkgs/http_parser/README.md b/pkgs/http_parser/README.md new file mode 100644 index 0000000000..e9d330699b --- /dev/null +++ b/pkgs/http_parser/README.md @@ -0,0 +1,22 @@ +[![Pub Package](https://img.shields.io/pub/v/http_parser.svg)](https://pub.dartlang.org/packages/http_parser) +[![package publisher](https://img.shields.io/pub/publisher/http_parser.svg)](https://pub.dev/packages/http_parser/publisher) + +`http_parser` is a platform-independent package for parsing and serializing +various HTTP-related formats. It's designed to be usable on both the browser and +the server, and thus avoids referencing any types from `dart:io` or `dart:html`. + +## Features + +* Support for parsing and formatting dates according to [HTTP/1.1][2616], the + HTTP/1.1 standard. 
+ +* A `MediaType` class that represents an HTTP media type, as used in `Accept` + and `Content-Type` headers. This class supports both parsing and formatting + media types according to [HTTP/1.1][2616]. + +* A `WebSocketChannel` class that provides a `StreamChannel` interface for both + the client and server sides of the [WebSocket protocol][6455] independently of + any specific server implementation. + +[2616]: https://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html +[6455]: https://tools.ietf.org/html/rfc6455 diff --git a/pkgs/http_parser/analysis_options.yaml b/pkgs/http_parser/analysis_options.yaml new file mode 100644 index 0000000000..c0bcfca91e --- /dev/null +++ b/pkgs/http_parser/analysis_options.yaml @@ -0,0 +1,33 @@ +# https://dart.dev/tools/analysis#the-analysis-options-file +include: package:dart_flutter_team_lints/analysis_options.yaml + +analyzer: + language: + strict-casts: true + strict-inference: true + strict-raw-types: true + +linter: + rules: + - avoid_bool_literals_in_conditional_expressions + - avoid_classes_with_only_static_members + - avoid_private_typedef_functions + - avoid_redundant_argument_values + - avoid_returning_this + - avoid_unused_constructor_parameters + - avoid_void_async + - cancel_subscriptions + - join_return_with_assignment + - literal_only_boolean_expressions + - missing_whitespace_between_adjacent_strings + - no_adjacent_strings_in_list + - no_runtimeType_toString + - package_api_docs + - prefer_const_declarations + - prefer_expression_function_bodies + - prefer_final_locals + - unnecessary_await_in_return + - unnecessary_breaks + - use_if_null_to_convert_nulls_to_bools + - use_raw_strings + - use_string_buffers diff --git a/pkgs/http_parser/example/example.dart b/pkgs/http_parser/example/example.dart new file mode 100644 index 0000000000..9230b6a542 --- /dev/null +++ b/pkgs/http_parser/example/example.dart @@ -0,0 +1,16 @@ +// Copyright (c) 2020, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +import 'package:http_parser/http_parser.dart'; + +void main() { + final date = DateTime.utc(2014, 9, 9, 9, 9, 9); + print(date); // 2014-09-09 09:09:09.000Z + + final httpDateFormatted = formatHttpDate(date); + print(httpDateFormatted); // Tue, 09 Sep 2014 09:09:09 GMT + + final nowParsed = parseHttpDate(httpDateFormatted); + print(nowParsed); // 2014-09-09 09:09:09.000Z +} diff --git a/pkgs/http_parser/lib/http_parser.dart b/pkgs/http_parser/lib/http_parser.dart new file mode 100644 index 0000000000..77b20c7a05 --- /dev/null +++ b/pkgs/http_parser/lib/http_parser.dart @@ -0,0 +1,9 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +export 'src/authentication_challenge.dart'; +export 'src/case_insensitive_map.dart'; +export 'src/chunked_coding.dart'; +export 'src/http_date.dart'; +export 'src/media_type.dart'; diff --git a/pkgs/http_parser/lib/src/authentication_challenge.dart b/pkgs/http_parser/lib/src/authentication_challenge.dart new file mode 100644 index 0000000000..7eebc431f9 --- /dev/null +++ b/pkgs/http_parser/lib/src/authentication_challenge.dart @@ -0,0 +1,149 @@ +// Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. 
Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +import 'dart:collection'; + +import 'package:string_scanner/string_scanner.dart'; + +import 'case_insensitive_map.dart'; +import 'scan.dart'; +import 'utils.dart'; + +/// A single challenge in a WWW-Authenticate header, parsed as per [RFC 2617][]. +/// +/// [RFC 2617]: http://tools.ietf.org/html/rfc2617 +/// +/// Each WWW-Authenticate header contains one or more challenges, representing +/// valid ways to authenticate with the server. +class AuthenticationChallenge { + /// The scheme describing the type of authentication that's required, for + /// example "basic" or "digest". + /// + /// This is normalized to always be lower-case. + final String scheme; + + /// The parameters describing how to authenticate. + /// + /// The semantics of these parameters are scheme-specific. The keys of this + /// map are case-insensitive. + final Map parameters; + + /// Parses a WWW-Authenticate header, which should contain one or more + /// challenges. + /// + /// Throws a [FormatException] if the header is invalid. + static List parseHeader(String header) => + wrapFormatException('authentication header', header, () { + final scanner = StringScanner(header); + scanner.scan(whitespace); + final challenges = parseList(scanner, () { + final scheme = _scanScheme(scanner, whitespaceName: '" " or "="'); + + // Manually parse the inner list. We need to do some lookahead to + // disambiguate between an auth param and another challenge. + final params = {}; + + // Consume initial empty values. + while (scanner.scan(',')) { + scanner.scan(whitespace); + } + + _scanAuthParam(scanner, params); + + var beforeComma = scanner.position; + while (scanner.scan(',')) { + scanner.scan(whitespace); + + // Empty elements are allowed, but excluded from the results. + if (scanner.matches(',') || scanner.isDone) continue; + + scanner.expect(token, name: 'a token'); + final name = scanner.lastMatch![0]!; + scanner.scan(whitespace); + + // If there's no "=", then this is another challenge rather than a + // parameter for the current challenge. + if (!scanner.scan('=')) { + scanner.position = beforeComma; + break; + } + + scanner.scan(whitespace); + + if (scanner.scan(token)) { + params[name] = scanner.lastMatch![0]!; + } else { + params[name] = expectQuotedString(scanner, + name: 'a token or a quoted string'); + } + + scanner.scan(whitespace); + beforeComma = scanner.position; + } + + return AuthenticationChallenge(scheme, params); + }); + + scanner.expectDone(); + return challenges; + }); + + /// Parses a single WWW-Authenticate challenge value. + /// + /// Throws a [FormatException] if the challenge is invalid. + factory AuthenticationChallenge.parse(String challenge) => + wrapFormatException('authentication challenge', challenge, () { + final scanner = StringScanner(challenge); + scanner.scan(whitespace); + final scheme = _scanScheme(scanner); + + final params = {}; + parseList(scanner, () => _scanAuthParam(scanner, params)); + + scanner.expectDone(); + return AuthenticationChallenge(scheme, params); + }); + + /// Scans a single scheme name and asserts that it's followed by a space. + /// + /// If [whitespaceName] is passed, it's used as the name for exceptions thrown + /// due to invalid trailing whitespace. + static String _scanScheme(StringScanner scanner, {String? 
whitespaceName}) { + scanner.expect(token, name: 'a token'); + final scheme = scanner.lastMatch![0]!.toLowerCase(); + + scanner.scan(whitespace); + + // The spec specifically requires a space between the scheme and its + // params. + if (scanner.lastMatch == null || !scanner.lastMatch![0]!.contains(' ')) { + scanner.expect(' ', name: whitespaceName); + } + + return scheme; + } + + /// Scans a single authentication parameter and stores its result in [params]. + static void _scanAuthParam( + StringScanner scanner, Map params) { + scanner.expect(token, name: 'a token'); + final name = scanner.lastMatch![0]!; + scanner.scan(whitespace); + scanner.expect('='); + scanner.scan(whitespace); + + if (scanner.scan(token)) { + params[name] = scanner.lastMatch![0]!; + } else { + params[name] = + expectQuotedString(scanner, name: 'a token or a quoted string'); + } + + scanner.scan(whitespace); + } + + /// Creates a new challenge value with [scheme] and [parameters]. + AuthenticationChallenge(this.scheme, Map parameters) + : parameters = UnmodifiableMapView(CaseInsensitiveMap.from(parameters)); +} diff --git a/pkgs/http_parser/lib/src/case_insensitive_map.dart b/pkgs/http_parser/lib/src/case_insensitive_map.dart new file mode 100644 index 0000000000..ed344e6f24 --- /dev/null +++ b/pkgs/http_parser/lib/src/case_insensitive_map.dart @@ -0,0 +1,25 @@ +// Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +import 'package:collection/collection.dart'; + +/// A map from case-insensitive strings to values. +/// +/// Much of HTTP is case-insensitive, so this is useful to have pre-defined. +class CaseInsensitiveMap extends CanonicalizedMap { + /// Creates an empty case-insensitive map. + CaseInsensitiveMap() : super(_canonicalizer); + + /// Creates a case-insensitive map that is initialized with the key/value + /// pairs of [other]. + CaseInsensitiveMap.from(Map other) + : super.from(other, _canonicalizer); + + /// Creates a case-insensitive map that is initialized with the key/value + /// pairs of [entries]. + CaseInsensitiveMap.fromEntries(Iterable> entries) + : super.fromEntries(entries, _canonicalizer); + + static String _canonicalizer(String key) => key.toLowerCase(); +} diff --git a/pkgs/http_parser/lib/src/chunked_coding.dart b/pkgs/http_parser/lib/src/chunked_coding.dart new file mode 100644 index 0000000000..2b496a9bf9 --- /dev/null +++ b/pkgs/http_parser/lib/src/chunked_coding.dart @@ -0,0 +1,42 @@ +// Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +import 'dart:convert'; + +import 'chunked_coding/decoder.dart'; +import 'chunked_coding/encoder.dart'; + +export 'chunked_coding/decoder.dart' hide chunkedCodingDecoder; +export 'chunked_coding/encoder.dart' hide chunkedCodingEncoder; + +/// The canonical instance of [ChunkedCodingCodec]. +const chunkedCoding = ChunkedCodingCodec._(); + +/// A codec that encodes and decodes the [chunked transfer coding][]. +/// +/// [chunked transfer coding]: https://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.6.1 +/// +/// The [encoder] creates a *single* chunked message for each call to +/// [ChunkedCodingEncoder.convert] or +/// [ChunkedCodingEncoder.startChunkedConversion]. 
This means that it will +/// always add an end-of-message footer once conversion has finished. It doesn't +/// support generating chunk extensions or trailing headers. +/// +/// Similarly, the [decoder] decodes a *single* chunked message into a stream of +/// byte arrays that must be concatenated to get the full list (like most Dart +/// byte streams). It doesn't support decoding a stream that contains multiple +/// chunked messages, nor does it support a stream that contains chunked data +/// mixed with other types of data. +/// +/// Currently, [decoder] will fail to parse chunk extensions and trailing +/// headers. It may be updated to silently ignore them in the future. +class ChunkedCodingCodec extends Codec, List> { + @override + ChunkedCodingEncoder get encoder => chunkedCodingEncoder; + + @override + ChunkedCodingDecoder get decoder => chunkedCodingDecoder; + + const ChunkedCodingCodec._(); +} diff --git a/pkgs/http_parser/lib/src/chunked_coding/charcodes.dart b/pkgs/http_parser/lib/src/chunked_coding/charcodes.dart new file mode 100644 index 0000000000..4d5e451f93 --- /dev/null +++ b/pkgs/http_parser/lib/src/chunked_coding/charcodes.dart @@ -0,0 +1,36 @@ +// Copyright (c) 2021, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +/// "Line feed" control character. +const int $lf = 0x0a; + +/// "Carriage return" control character. +const int $cr = 0x0d; + +/// Character `0`. +const int $0 = 0x30; + +/// Character `1`. +const int $1 = 0x31; + +/// Character `3`. +const int $3 = 0x33; + +/// Character `4`. +const int $4 = 0x34; + +/// Character `7`. +const int $7 = 0x37; + +/// Character `A`. +const int $A = 0x41; + +/// Character `q`. +const int $q = 0x71; + +/// Character `a`. +const int $a = 0x61; + +/// Character `f`. +const int $f = 0x66; diff --git a/pkgs/http_parser/lib/src/chunked_coding/decoder.dart b/pkgs/http_parser/lib/src/chunked_coding/decoder.dart new file mode 100644 index 0000000000..9eb8e93b4c --- /dev/null +++ b/pkgs/http_parser/lib/src/chunked_coding/decoder.dart @@ -0,0 +1,235 @@ +// Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +import 'dart:async'; +import 'dart:convert'; +import 'dart:math' as math; +import 'dart:typed_data'; + +import 'package:typed_data/typed_data.dart'; + +import 'charcodes.dart'; + +/// The canonical instance of [ChunkedCodingDecoder]. +const chunkedCodingDecoder = ChunkedCodingDecoder._(); + +/// A converter that decodes byte arrays into chunks with size tags. +class ChunkedCodingDecoder extends Converter, List> { + const ChunkedCodingDecoder._(); + + @override + List convert(List input) { + final sink = _Sink(StreamController()); + final output = sink._decode(input, 0, input.length); + if (sink._state == _State.end) return output; + + throw FormatException('Input ended unexpectedly.', input, input.length); + } + + @override + ByteConversionSink startChunkedConversion(Sink> sink) => + _Sink(sink); +} + +/// A conversion sink for the chunked transfer encoding. +class _Sink extends ByteConversionSinkBase { + /// The underlying sink to which decoded byte arrays will be passed. + final Sink> _sink; + + /// The current state of the sink's parsing. + var _state = _State.boundary; + + /// The size of the chunk being parsed. 
+ /// + /// Only assigned and used within [_decode]. + late int _size; + + _Sink(this._sink); + + @override + void add(List chunk) => addSlice(chunk, 0, chunk.length, false); + + @override + void addSlice(List chunk, int start, int end, bool isLast) { + RangeError.checkValidRange(start, end, chunk.length); + final output = _decode(chunk, start, end); + if (output.isNotEmpty) _sink.add(output); + if (isLast) _close(chunk, end); + } + + @override + void close() => _close(); + + /// Like [close], but includes [chunk] and [index] in the [FormatException] if + /// one is thrown. + void _close([List? chunk, int? index]) { + if (_state != _State.end) { + throw FormatException('Input ended unexpectedly.', chunk, index); + } + + _sink.close(); + } + + /// Decodes the data in [bytes] from [start] to [end]. + Uint8List _decode(List bytes, int start, int end) { + /// Throws a [FormatException] if `bytes[start] != $char`. Uses [name] to + /// describe the character in the exception text. + void assertCurrentChar(int char, String name) { + if (bytes[start] != char) { + throw FormatException('Expected $name.', bytes, start); + } + } + + final buffer = Uint8Buffer(); + while (start != end) { + switch (_state) { + case _State.boundary: + _size = _digitForByte(bytes, start); + _state = _State.size; + start++; + + case _State.size: + if (bytes[start] == $cr) { + _state = _State.sizeBeforeLF; + } else { + // Shift four bits left since a single hex digit contains four bits + // of information. + _size = (_size << 4) + _digitForByte(bytes, start); + } + start++; + + case _State.sizeBeforeLF: + assertCurrentChar($lf, 'LF'); + _state = _size == 0 ? _State.endBeforeCR : _State.body; + start++; + + case _State.body: + final chunkEnd = math.min(end, start + _size); + buffer.addAll(bytes, start, chunkEnd); + _size -= chunkEnd - start; + start = chunkEnd; + if (_size == 0) _state = _State.bodyBeforeCR; + + case _State.bodyBeforeCR: + assertCurrentChar($cr, 'CR'); + _state = _State.bodyBeforeLF; + start++; + + case _State.bodyBeforeLF: + assertCurrentChar($lf, 'LF'); + _state = _State.boundary; + start++; + + case _State.endBeforeCR: + assertCurrentChar($cr, 'CR'); + _state = _State.endBeforeLF; + start++; + + case _State.endBeforeLF: + assertCurrentChar($lf, 'LF'); + _state = _State.end; + start++; + + case _State.end: + throw FormatException('Expected no more data.', bytes, start); + } + } + return buffer.buffer.asUint8List(0, buffer.length); + } + + /// Returns the hex digit (0 through 15) corresponding to the byte at index + /// [index] in [bytes]. + /// + /// If the given byte isn't a hexadecimal ASCII character, throws a + /// [FormatException]. + int _digitForByte(List bytes, int index) { + // If the byte is a numeral, get its value. XOR works because 0 in ASCII is + // `0b110000` and the other numerals come after it in ascending order and + // take up at most four bits. + // + // We check for digits first because it ensures there's only a single branch + // for 10 out of 16 of the expected cases. We don't count the `digit >= 0` + // check because branch prediction will always work on it for valid data. + final byte = bytes[index]; + final digit = $0 ^ byte; + if (digit <= 9) { + if (digit >= 0) return digit; + } else { + // If the byte is an uppercase letter, convert it to lowercase. This works + // because uppercase letters in ASCII are exactly `0b100000 = 0x20` less + // than lowercase letters, so if we ensure that that bit is 1 we ensure + // that the letter is lowercase. 
+ final letter = 0x20 | byte; + if ($a <= letter && letter <= $f) return letter - $a + 10; + } + + throw FormatException( + 'Invalid hexadecimal byte 0x${byte.toRadixString(16).toUpperCase()}.', + bytes, + index); + } +} + +/// An enumeration of states that [_Sink] can exist in when decoded a chunked +/// message. +enum _State { + /// The parser has fully parsed one chunk and is expecting the header for the + /// next chunk. + /// + /// Transitions to [size]. + boundary('boundary'), + + /// The parser has parsed at least one digit of the chunk size header, but has + /// not yet parsed the `CR LF` sequence that indicates the end of that header. + /// + /// Transitions to [sizeBeforeLF]. + size('size'), + + /// The parser has parsed the chunk size header and the CR character after it, + /// but not the LF. + /// + /// Transitions to [body] or [bodyBeforeCR]. + sizeBeforeLF('size before LF'), + + /// The parser has parsed a chunk header and possibly some of the body, but + /// still needs to consume more bytes. + /// + /// Transitions to [bodyBeforeCR]. + body('body'), + + // The parser has parsed all the bytes in a chunk body but not the CR LF + // sequence that follows it. + // + // Transitions to [bodyBeforeLF]. + bodyBeforeCR('body before CR'), + + // The parser has parsed all the bytes in a chunk body and the CR that follows + // it, but not the LF after that. + // + // Transitions to [boundary]. + bodyBeforeLF('body before LF'), + + /// The parser has parsed the final empty chunk but not the CR LF sequence + /// that follows it. + /// + /// Transitions to [endBeforeLF]. + endBeforeCR('end before CR'), + + /// The parser has parsed the final empty chunk and the CR that follows it, + /// but not the LF after that. + /// + /// Transitions to [end]. + endBeforeLF('end before LF'), + + /// The parser has parsed the final empty chunk as well as the CR LF that + /// follows, and expects no more data. + end('end'); + + const _State(this.name); + + final String name; + + @override + String toString() => name; +} diff --git a/pkgs/http_parser/lib/src/chunked_coding/encoder.dart b/pkgs/http_parser/lib/src/chunked_coding/encoder.dart new file mode 100644 index 0000000000..deb4906608 --- /dev/null +++ b/pkgs/http_parser/lib/src/chunked_coding/encoder.dart @@ -0,0 +1,83 @@ +// Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +import 'dart:convert'; +import 'dart:typed_data'; + +import 'charcodes.dart'; + +/// The canonical instance of [ChunkedCodingEncoder]. +const chunkedCodingEncoder = ChunkedCodingEncoder._(); + +/// The chunk indicating that the chunked message has finished. +final _doneChunk = Uint8List.fromList([$0, $cr, $lf, $cr, $lf]); + +/// A converter that encodes byte arrays into chunks with size tags. +class ChunkedCodingEncoder extends Converter, List> { + const ChunkedCodingEncoder._(); + + @override + List convert(List input) => + _convert(input, 0, input.length, isLast: true); + + @override + ByteConversionSink startChunkedConversion(Sink> sink) => + _Sink(sink); +} + +/// A conversion sink for the chunked transfer encoding. +class _Sink extends ByteConversionSinkBase { + /// The underlying sink to which encoded byte arrays will be passed. 
+ final Sink> _sink; + + _Sink(this._sink); + + @override + void add(List chunk) { + _sink.add(_convert(chunk, 0, chunk.length)); + } + + @override + void addSlice(List chunk, int start, int end, bool isLast) { + RangeError.checkValidRange(start, end, chunk.length); + _sink.add(_convert(chunk, start, end, isLast: isLast)); + if (isLast) _sink.close(); + } + + @override + void close() { + _sink.add(_doneChunk); + _sink.close(); + } +} + +/// Returns a new list a chunked transfer encoding header followed by the slice +/// of [bytes] from [start] to [end]. +/// +/// If [isLast] is `true`, this adds the footer that indicates that the chunked +/// message is complete. +List _convert(List bytes, int start, int end, {bool isLast = false}) { + if (end == start) return isLast ? _doneChunk : const []; + + final size = end - start; + final sizeInHex = size.toRadixString(16); + final footerSize = isLast ? _doneChunk.length : 0; + + // Add 4 for the CRLF sequences that follow the size header and the bytes. + final list = Uint8List(sizeInHex.length + 4 + size + footerSize); + list.setRange(0, sizeInHex.length, sizeInHex.codeUnits); + + var cursor = sizeInHex.length; + list[cursor++] = $cr; + list[cursor++] = $lf; + list.setRange(cursor, cursor + end - start, bytes, start); + cursor += end - start; + list[cursor++] = $cr; + list[cursor++] = $lf; + + if (isLast) { + list.setRange(list.length - footerSize, list.length, _doneChunk); + } + return list; +} diff --git a/pkgs/http_parser/lib/src/http_date.dart b/pkgs/http_parser/lib/src/http_date.dart new file mode 100644 index 0000000000..0cedd9a6d8 --- /dev/null +++ b/pkgs/http_parser/lib/src/http_date.dart @@ -0,0 +1,158 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +import 'package:string_scanner/string_scanner.dart'; + +import 'utils.dart'; + +const _weekdays = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']; +const _months = [ + 'Jan', + 'Feb', + 'Mar', + 'Apr', + 'May', + 'Jun', + 'Jul', + 'Aug', + 'Sep', + 'Oct', + 'Nov', + 'Dec' +]; + +final _shortWeekdayRegExp = RegExp(r'Mon|Tue|Wed|Thu|Fri|Sat|Sun'); +final _longWeekdayRegExp = + RegExp(r'Monday|Tuesday|Wednesday|Thursday|Friday|Saturday|Sunday'); +final _monthRegExp = RegExp(r'Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec'); +final _digitRegExp = RegExp(r'\d+'); + +/// Return a HTTP-formatted string representation of [date]. +/// +/// This follows [RFC 822](http://tools.ietf.org/html/rfc822) as updated by +/// [RFC 1123](http://tools.ietf.org/html/rfc1123). +String formatHttpDate(DateTime date) { + date = date.toUtc(); + final buffer = StringBuffer() + ..write(_weekdays[date.weekday - 1]) + ..write(', ') + ..write(date.day <= 9 ? '0' : '') + ..write(date.day.toString()) + ..write(' ') + ..write(_months[date.month - 1]) + ..write(' ') + ..write(date.year.toString()) + ..write(date.hour <= 9 ? ' 0' : ' ') + ..write(date.hour.toString()) + ..write(date.minute <= 9 ? ':0' : ':') + ..write(date.minute.toString()) + ..write(date.second <= 9 ? ':0' : ':') + ..write(date.second.toString()) + ..write(' GMT'); + return buffer.toString(); +} + +/// Parses an HTTP-formatted date into a UTC [DateTime]. +/// +/// This follows [RFC 2616](http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.3). +/// It will throw a [FormatException] if [date] is invalid. 
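+///
+/// For example, mirroring `example/example.dart` above:
+/// ```dart
+/// final date = parseHttpDate('Tue, 09 Sep 2014 09:09:09 GMT');
+/// print(date); // 2014-09-09 09:09:09.000Z
+/// ```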
+DateTime parseHttpDate(String date) => + wrapFormatException('HTTP date', date, () { + final scanner = StringScanner(date); + + if (scanner.scan(_longWeekdayRegExp)) { + // RFC 850 starts with a long weekday. + scanner.expect(', '); + final day = _parseInt(scanner, 2); + scanner.expect('-'); + final month = _parseMonth(scanner); + scanner.expect('-'); + final year = 1900 + _parseInt(scanner, 2); + scanner.expect(' '); + final time = _parseTime(scanner); + scanner.expect(' GMT'); + scanner.expectDone(); + + return _makeDateTime(year, month, day, time); + } + + // RFC 1123 and asctime both start with a short weekday. + scanner.expect(_shortWeekdayRegExp); + if (scanner.scan(', ')) { + // RFC 1123 follows the weekday with a comma. + final day = _parseInt(scanner, 2); + scanner.expect(' '); + final month = _parseMonth(scanner); + scanner.expect(' '); + final year = _parseInt(scanner, 4); + scanner.expect(' '); + final time = _parseTime(scanner); + scanner.expect(' GMT'); + scanner.expectDone(); + + return _makeDateTime(year, month, day, time); + } + + // asctime follows the weekday with a space. + scanner.expect(' '); + final month = _parseMonth(scanner); + scanner.expect(' '); + final day = + scanner.scan(' ') ? _parseInt(scanner, 1) : _parseInt(scanner, 2); + scanner.expect(' '); + final time = _parseTime(scanner); + scanner.expect(' '); + final year = _parseInt(scanner, 4); + scanner.expectDone(); + + return _makeDateTime(year, month, day, time); + }); + +/// Parses a short-form month name to a form accepted by [DateTime]. +int _parseMonth(StringScanner scanner) { + scanner.expect(_monthRegExp); + // DateTime uses 1-indexed months. + return _months.indexOf(scanner.lastMatch![0]!) + 1; +} + +/// Parses an int an enforces that it has exactly [digits] digits. +int _parseInt(StringScanner scanner, int digits) { + scanner.expect(_digitRegExp); + if (scanner.lastMatch![0]!.length != digits) { + scanner.error('expected a $digits-digit number.'); + } + + return int.parse(scanner.lastMatch![0]!); +} + +/// Parses an timestamp of the form "HH:MM:SS" on a 24-hour clock. +DateTime _parseTime(StringScanner scanner) { + final hours = _parseInt(scanner, 2); + if (hours >= 24) scanner.error('hours may not be greater than 24.'); + scanner.expect(':'); + + final minutes = _parseInt(scanner, 2); + if (minutes >= 60) scanner.error('minutes may not be greater than 60.'); + scanner.expect(':'); + + final seconds = _parseInt(scanner, 2); + if (seconds >= 60) scanner.error('seconds may not be greater than 60.'); + + return DateTime(1, 1, 1, hours, minutes, seconds); +} + +/// Returns a UTC [DateTime] from the given components. +/// +/// Validates that [day] is a valid day for [month]. If it's not, throws a +/// [FormatException]. +DateTime _makeDateTime(int year, int month, int day, DateTime time) { + final dateTime = + DateTime.utc(year, month, day, time.hour, time.minute, time.second); + + // If [day] was too large, it will cause [month] to overflow. + if (dateTime.month != month) { + throw FormatException("invalid day '$day' for month '$month'."); + } + return dateTime; +} diff --git a/pkgs/http_parser/lib/src/media_type.dart b/pkgs/http_parser/lib/src/media_type.dart new file mode 100644 index 0000000000..814de63467 --- /dev/null +++ b/pkgs/http_parser/lib/src/media_type.dart @@ -0,0 +1,154 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. 
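+
+// In the API below, `MediaType.parse('text/html; charset=utf-8')` yields type
+// `text`, subtype `html`, and parameters `{charset: utf-8}`; `toString()`
+// reassembles the header value, quoting any parameter value that is not a
+// valid HTTP token.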
+ +import 'package:collection/collection.dart'; +import 'package:string_scanner/string_scanner.dart'; + +import 'case_insensitive_map.dart'; +import 'scan.dart'; +import 'utils.dart'; + +/// A regular expression matching a character that needs to be backslash-escaped +/// in a quoted string. +final _escapedChar = RegExp(r'["\x00-\x1F\x7F]'); + +/// A class representing an HTTP media type, as used in Accept and Content-Type +/// headers. +/// +/// This is immutable; new instances can be created based on an old instance by +/// calling [change]. +class MediaType { + /// The primary identifier of the MIME type. + /// + /// This is always lowercase. + final String type; + + /// The secondary identifier of the MIME type. + /// + /// This is always lowercase. + final String subtype; + + /// The parameters to the media type. + /// + /// This map is immutable and the keys are case-insensitive. + final Map parameters; + + /// The media type's MIME type. + String get mimeType => '$type/$subtype'; + + /// Parses a media type. + /// + /// This will throw a FormatError if the media type is invalid. + factory MediaType.parse(String mediaType) => + // This parsing is based on sections 3.6 and 3.7 of the HTTP spec: + // http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html. + wrapFormatException('media type', mediaType, () { + final scanner = StringScanner(mediaType); + scanner.scan(whitespace); + scanner.expect(token); + final type = scanner.lastMatch![0]!; + scanner.expect('/'); + scanner.expect(token); + final subtype = scanner.lastMatch![0]!; + scanner.scan(whitespace); + + final parameters = {}; + while (scanner.scan(';')) { + scanner.scan(whitespace); + scanner.expect(token); + final attribute = scanner.lastMatch![0]!; + scanner.expect('='); + + String value; + if (scanner.scan(token)) { + value = scanner.lastMatch![0]!; + } else { + value = expectQuotedString(scanner); + } + + scanner.scan(whitespace); + parameters[attribute] = value; + } + + scanner.expectDone(); + return MediaType(type, subtype, parameters); + }); + + MediaType(String type, String subtype, [Map? parameters]) + : type = type.toLowerCase(), + subtype = subtype.toLowerCase(), + parameters = UnmodifiableMapView( + parameters == null ? {} : CaseInsensitiveMap.from(parameters)); + + /// Returns a copy of this [MediaType] with some fields altered. + /// + /// [type] and [subtype] alter the corresponding fields. [mimeType] is parsed + /// and alters both the [type] and [subtype] fields; it cannot be passed along + /// with [type] or [subtype]. + /// + /// [parameters] overwrites and adds to the corresponding field. If + /// [clearParameters] is passed, it replaces the corresponding field entirely + /// instead. + MediaType change( + {String? type, + String? subtype, + String? mimeType, + Map? 
parameters, + bool clearParameters = false}) { + if (mimeType != null) { + if (type != null) { + throw ArgumentError('You may not pass both [type] and [mimeType].'); + } else if (subtype != null) { + throw ArgumentError('You may not pass both [subtype] and ' + '[mimeType].'); + } + + final segments = mimeType.split('/'); + if (segments.length != 2) { + throw FormatException('Invalid mime type "$mimeType".'); + } + + type = segments[0]; + subtype = segments[1]; + } + + type ??= this.type; + subtype ??= this.subtype; + parameters ??= {}; + + if (!clearParameters) { + final newParameters = parameters; + parameters = Map.from(this.parameters); + parameters.addAll(newParameters); + } + + return MediaType(type, subtype, parameters); + } + + /// Converts the media type to a string. + /// + /// This will produce a valid HTTP media type. + @override + String toString() { + final buffer = StringBuffer() + ..write(type) + ..write('/') + ..write(subtype); + + parameters.forEach((attribute, value) { + buffer.write('; $attribute='); + if (nonToken.hasMatch(value)) { + buffer + ..write('"') + ..write( + value.replaceAllMapped(_escapedChar, (match) => '\\${match[0]}')) + ..write('"'); + } else { + buffer.write(value); + } + }); + + return buffer.toString(); + } +} diff --git a/pkgs/http_parser/lib/src/scan.dart b/pkgs/http_parser/lib/src/scan.dart new file mode 100644 index 0000000000..96fb8ae3f5 --- /dev/null +++ b/pkgs/http_parser/lib/src/scan.dart @@ -0,0 +1,70 @@ +// Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +import 'package:string_scanner/string_scanner.dart'; + +/// An HTTP token. +final token = RegExp(r'[^()<>@,;:"\\/[\]?={} \t\x00-\x1F\x7F]+'); + +/// Linear whitespace. +final _lws = RegExp(r'(?:\r\n)?[ \t]+'); + +/// A quoted string. +final _quotedString = RegExp(r'"(?:[^"\x00-\x1F\x7F]|\\.)*"'); + +/// A quoted pair. +final _quotedPair = RegExp(r'\\(.)'); + +/// A character that is *not* a valid HTTP token. +final nonToken = RegExp(r'[()<>@,;:"\\/\[\]?={} \t\x00-\x1F\x7F]'); + +/// A regular expression matching any number of [_lws] productions in a row. +final whitespace = RegExp('(?:${_lws.pattern})*'); + +/// Parses a list of elements, as in `1#element` in the HTTP spec. +/// +/// [scanner] is used to parse the elements, and [parseElement] is used to parse +/// each one individually. The values returned by [parseElement] are collected +/// in a list and returned. +/// +/// Once this is finished, [scanner] will be at the next non-LWS character in +/// the string, or the end of the string. +List parseList(StringScanner scanner, T Function() parseElement) { + final result = []; + + // Consume initial empty values. + while (scanner.scan(',')) { + scanner.scan(whitespace); + } + + result.add(parseElement()); + scanner.scan(whitespace); + + while (scanner.scan(',')) { + scanner.scan(whitespace); + + // Empty elements are allowed, but excluded from the results. + if (scanner.matches(',') || scanner.isDone) continue; + + result.add(parseElement()); + scanner.scan(whitespace); + } + + return result; +} + +/// Parses a single quoted string, and returns its contents. +/// +/// If [name] is passed, it's used to describe the expected value if it's not +/// found. 
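+///
+/// For example, given the input `"a\"b"` the surrounding quotes are dropped
+/// and the quoted pair is unescaped, so this returns `a"b`.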
+String expectQuotedString(
+  StringScanner scanner, {
+  String name = 'quoted string',
+}) {
+  scanner.expect(_quotedString, name: name);
+  final string = scanner.lastMatch![0]!;
+  return string
+      .substring(1, string.length - 1)
+      .replaceAllMapped(_quotedPair, (match) => match[1]!);
+}
diff --git a/pkgs/http_parser/lib/src/utils.dart b/pkgs/http_parser/lib/src/utils.dart
new file mode 100644
index 0000000000..ca00fd3af5
--- /dev/null
+++ b/pkgs/http_parser/lib/src/utils.dart
@@ -0,0 +1,21 @@
+// Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+import 'package:source_span/source_span.dart';
+
+/// Runs [body] and wraps any format exceptions it produces.
+///
+/// [name] should describe the type of thing being parsed, and [value] should be
+/// its actual value.
+T wrapFormatException<T>(String name, String value, T Function() body) {
+  try {
+    return body();
+  } on SourceSpanFormatException catch (error) {
+    throw SourceSpanFormatException(
+        'Invalid $name: ${error.message}', error.span, error.source);
+  } on FormatException catch (error) {
+    throw FormatException(
+        'Invalid $name "$value": ${error.message}', error.source, error.offset);
+  }
+}
diff --git a/pkgs/http_parser/pubspec.yaml b/pkgs/http_parser/pubspec.yaml
new file mode 100644
index 0000000000..13888276f1
--- /dev/null
+++ b/pkgs/http_parser/pubspec.yaml
@@ -0,0 +1,18 @@
+name: http_parser
+version: 4.1.1
+description: >-
+  A platform-independent package for parsing and serializing HTTP formats.
+repository: https://github.com/dart-lang/http/tree/master/pkgs/http_parser
+
+environment:
+  sdk: ^3.4.0
+
+dependencies:
+  collection: ^1.19.0
+  source_span: ^1.8.0
+  string_scanner: ^1.1.0
+  typed_data: ^1.3.0
+
+dev_dependencies:
+  dart_flutter_team_lints: ^3.0.0
+  test: ^1.16.6
diff --git a/pkgs/http_parser/test/authentication_challenge_test.dart b/pkgs/http_parser/test/authentication_challenge_test.dart
new file mode 100644
index 0000000000..52d798ca3a
--- /dev/null
+++ b/pkgs/http_parser/test/authentication_challenge_test.dart
@@ -0,0 +1,142 @@
+// Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
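+
+// The header values exercised below follow the RFC 2617 challenge grammar,
+// e.g. `Basic realm="example"` or `Digest realm="example", nonce="abc123"`,
+// with `scheme1`/`scheme2` standing in for real scheme names.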
+ +import 'package:http_parser/http_parser.dart'; +import 'package:test/test.dart'; + +void main() { + group('parse', () { + _singleChallengeTests(AuthenticationChallenge.parse); + }); + + group('parseHeader', () { + group('with a single challenge', () { + _singleChallengeTests((challenge) { + final challenges = AuthenticationChallenge.parseHeader(challenge); + expect(challenges, hasLength(1)); + return challenges.single; + }); + }); + + test('parses multiple challenges', () { + final challenges = AuthenticationChallenge.parseHeader( + 'scheme1 realm=fblthp, scheme2 realm=asdfg'); + expect(challenges, hasLength(2)); + expect(challenges.first.scheme, equals('scheme1')); + expect(challenges.first.parameters, equals({'realm': 'fblthp'})); + expect(challenges.last.scheme, equals('scheme2')); + expect(challenges.last.parameters, equals({'realm': 'asdfg'})); + }); + + test('parses multiple challenges with multiple parameters', () { + final challenges = AuthenticationChallenge.parseHeader( + 'scheme1 realm=fblthp, foo=bar, scheme2 realm=asdfg, baz=bang'); + expect(challenges, hasLength(2)); + + expect(challenges.first.scheme, equals('scheme1')); + expect(challenges.first.parameters, + equals({'realm': 'fblthp', 'foo': 'bar'})); + + expect(challenges.last.scheme, equals('scheme2')); + expect(challenges.last.parameters, + equals({'realm': 'asdfg', 'baz': 'bang'})); + }); + }); +} + +/// Tests to run for parsing a single challenge. +/// +/// These are run on both [AuthenticationChallenge.parse] and +/// [AuthenticationChallenge.parseHeader], since they use almost entirely +/// separate code paths. +void _singleChallengeTests( + AuthenticationChallenge Function(String challenge) parseChallenge) { + test('parses a simple challenge', () { + final challenge = parseChallenge('scheme realm=fblthp'); + expect(challenge.scheme, equals('scheme')); + expect(challenge.parameters, equals({'realm': 'fblthp'})); + }); + + test('parses multiple parameters', () { + final challenge = parseChallenge('scheme realm=fblthp, foo=bar, baz=qux'); + expect(challenge.scheme, equals('scheme')); + expect(challenge.parameters, + equals({'realm': 'fblthp', 'foo': 'bar', 'baz': 'qux'})); + }); + + test('parses quoted string parameters', () { + final challenge = + parseChallenge('scheme realm="fblthp, foo=bar", baz="qux"'); + expect(challenge.scheme, equals('scheme')); + expect(challenge.parameters, + equals({'realm': 'fblthp, foo=bar', 'baz': 'qux'})); + }); + + test('normalizes the case of the scheme', () { + final challenge = parseChallenge('ScHeMe realm=fblthp'); + expect(challenge.scheme, equals('scheme')); + expect(challenge.parameters, equals({'realm': 'fblthp'})); + }); + + test('normalizes the case of the parameter name', () { + final challenge = parseChallenge('scheme ReAlM=fblthp'); + expect(challenge.scheme, equals('scheme')); + expect(challenge.parameters, containsPair('realm', 'fblthp')); + }); + + test("doesn't normalize the case of the parameter value", () { + final challenge = parseChallenge('scheme realm=FbLtHp'); + expect(challenge.scheme, equals('scheme')); + expect(challenge.parameters, containsPair('realm', 'FbLtHp')); + expect(challenge.parameters, isNot(containsPair('realm', 'fblthp'))); + }); + + test('allows extra whitespace', () { + final challenge = parseChallenge( + ' scheme\t \trealm\t = \tfblthp\t, \tfoo\t\r\n =\tbar\t'); + expect(challenge.scheme, equals('scheme')); + expect(challenge.parameters, equals({'realm': 'fblthp', 'foo': 'bar'})); + }); + + test('allows an empty parameter', () { + final 
challenge = parseChallenge('scheme realm=fblthp, , foo=bar'); + expect(challenge.scheme, equals('scheme')); + expect(challenge.parameters, equals({'realm': 'fblthp', 'foo': 'bar'})); + }); + + test('allows a leading comma', () { + final challenge = parseChallenge('scheme , realm=fblthp, foo=bar,'); + expect(challenge.scheme, equals('scheme')); + expect(challenge.parameters, equals({'realm': 'fblthp', 'foo': 'bar'})); + }); + + test('allows a trailing comma', () { + final challenge = parseChallenge('scheme realm=fblthp, foo=bar, ,'); + expect(challenge.scheme, equals('scheme')); + expect(challenge.parameters, equals({'realm': 'fblthp', 'foo': 'bar'})); + }); + + test('disallows only a scheme', () { + expect(() => parseChallenge('scheme'), throwsFormatException); + }); + + test('disallows a valueless parameter', () { + expect(() => parseChallenge('scheme realm'), throwsFormatException); + expect(() => parseChallenge('scheme realm='), throwsFormatException); + expect( + () => parseChallenge('scheme realm, foo=bar'), throwsFormatException); + }); + + test('requires a space after the scheme', () { + expect(() => parseChallenge('scheme\trealm'), throwsFormatException); + expect(() => parseChallenge('scheme\r\n\trealm='), throwsFormatException); + }); + + test('disallows junk after the parameters', () { + expect( + () => parseChallenge('scheme realm=fblthp foo'), throwsFormatException); + expect(() => parseChallenge('scheme realm=fblthp, foo=bar baz'), + throwsFormatException); + }); +} diff --git a/pkgs/http_parser/test/case_insensitive_map_test.dart b/pkgs/http_parser/test/case_insensitive_map_test.dart new file mode 100644 index 0000000000..7c65850228 --- /dev/null +++ b/pkgs/http_parser/test/case_insensitive_map_test.dart @@ -0,0 +1,35 @@ +// Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +import 'package:http_parser/http_parser.dart'; +import 'package:test/test.dart'; + +void main() { + test('provides case-insensitive access to the map', () { + final map = CaseInsensitiveMap(); + map['fOo'] = 'bAr'; + expect(map, containsPair('FoO', 'bAr')); + + map['foo'] = 'baz'; + expect(map, containsPair('FOO', 'baz')); + }); + + test('stores the original key cases', () { + final map = CaseInsensitiveMap(); + map['fOo'] = 'bAr'; + expect(map, equals({'fOo': 'bAr'})); + }); + + test('.from() converts an existing map', () { + final map = CaseInsensitiveMap.from({'fOo': 'bAr'}); + expect(map, containsPair('FoO', 'bAr')); + expect(map, equals({'fOo': 'bAr'})); + }); + + test('.fromEntries() converts an existing map', () { + final map = CaseInsensitiveMap.fromEntries({'fOo': 'bAr'}.entries); + expect(map, containsPair('FoO', 'bAr')); + expect(map, equals({'fOo': 'bAr'})); + }); +} diff --git a/pkgs/http_parser/test/chunked_coding_test.dart b/pkgs/http_parser/test/chunked_coding_test.dart new file mode 100644 index 0000000000..fc4c13ff39 --- /dev/null +++ b/pkgs/http_parser/test/chunked_coding_test.dart @@ -0,0 +1,522 @@ +// Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. 
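+
+// For reference, the chunked transfer coding frames a payload such as
+// [1, 2, 3] as "3\r\n", the payload bytes, "\r\n", and a terminating
+// "0\r\n\r\n" footer; the charcode-based expectations below spell this out
+// byte by byte.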
+ +import 'dart:async'; +import 'dart:convert'; + +import 'package:http_parser/http_parser.dart'; +import 'package:http_parser/src/chunked_coding/charcodes.dart'; +import 'package:test/test.dart'; + +void main() { + group('encoder', () { + test('adds a header to the chunk of bytes', () { + expect(chunkedCoding.encode([1, 2, 3]), + equals([$3, $cr, $lf, 1, 2, 3, $cr, $lf, $0, $cr, $lf, $cr, $lf])); + }); + + test('uses hex for chunk size', () { + final data = Iterable.generate(0xA7).toList(); + expect( + chunkedCoding.encode(data), + equals( + [$a, $7, $cr, $lf, ...data, $cr, $lf, $0, $cr, $lf, $cr, $lf])); + }); + + test('just generates a footer for an empty input', () { + expect(chunkedCoding.encode([]), equals([$0, $cr, $lf, $cr, $lf])); + }); + + group('with chunked conversion', () { + late List> results; + late ByteConversionSink sink; + setUp(() { + results = []; + final controller = StreamController>(sync: true); + controller.stream.listen(results.add); + sink = chunkedCoding.encoder.startChunkedConversion(controller.sink); + }); + + test('adds headers to each chunk of bytes', () { + sink.add([1, 2, 3, 4]); + expect( + results, + equals([ + [$4, $cr, $lf, 1, 2, 3, 4, $cr, $lf] + ])); + + sink.add([5, 6, 7]); + expect( + results, + equals([ + [$4, $cr, $lf, 1, 2, 3, 4, $cr, $lf], + [$3, $cr, $lf, 5, 6, 7, $cr, $lf], + ])); + + sink.close(); + expect( + results, + equals([ + [$4, $cr, $lf, 1, 2, 3, 4, $cr, $lf], + [$3, $cr, $lf, 5, 6, 7, $cr, $lf], + [$0, $cr, $lf, $cr, $lf], + ])); + }); + + test('handles empty chunks', () { + sink.add([]); + expect(results, equals([[]])); + + sink.add([1, 2, 3]); + expect( + results, + equals([ + [], + [$3, $cr, $lf, 1, 2, 3, $cr, $lf] + ])); + + sink.add([]); + expect( + results, + equals([ + [], + [$3, $cr, $lf, 1, 2, 3, $cr, $lf], + [] + ])); + + sink.close(); + expect( + results, + equals([ + [], + [$3, $cr, $lf, 1, 2, 3, $cr, $lf], + [], + [$0, $cr, $lf, $cr, $lf], + ])); + }); + + group('addSlice()', () { + test('adds bytes from the specified slice', () { + sink.addSlice([1, 2, 3, 4, 5], 1, 4, false); + expect( + results, + equals([ + [$3, $cr, $lf, 2, 3, 4, $cr, $lf] + ])); + }); + + test("doesn't add a header if the slice is empty", () { + sink.addSlice([1, 2, 3, 4, 5], 1, 1, false); + expect(results, equals([[]])); + }); + + test('adds a footer if isLast is true', () { + sink.addSlice([1, 2, 3, 4, 5], 1, 4, true); + expect( + results, + equals([ + [$3, $cr, $lf, 2, 3, 4, $cr, $lf, $0, $cr, $lf, $cr, $lf] + ])); + + // Setting isLast shuld close the sink. 
+ expect(() => sink.add([]), throwsStateError); + }); + + group('disallows', () { + test('start < 0', () { + expect(() => sink.addSlice([1, 2, 3, 4, 5], -1, 4, false), + throwsRangeError); + }); + + test('start > end', () { + expect(() => sink.addSlice([1, 2, 3, 4, 5], 3, 2, false), + throwsRangeError); + }); + + test('end > length', () { + expect(() => sink.addSlice([1, 2, 3, 4, 5], 1, 10, false), + throwsRangeError); + }); + }); + }); + }); + }); + + group('decoder', () { + test('parses chunked data', () { + expect( + chunkedCoding.decode([ + $3, + $cr, + $lf, + 1, + 2, + 3, + $cr, + $lf, + $4, + $cr, + $lf, + 4, + 5, + 6, + 7, + $cr, + $lf, + $0, + $cr, + $lf, + $cr, + $lf, + ]), + equals([1, 2, 3, 4, 5, 6, 7])); + }); + + test('parses hex size', () { + final data = Iterable.generate(0xA7).toList(); + expect( + chunkedCoding.decode( + [$a, $7, $cr, $lf, ...data, $cr, $lf, $0, $cr, $lf, $cr, $lf]), + equals(data)); + }); + + test('parses capital hex size', () { + final data = Iterable.generate(0xA7).toList(); + expect( + chunkedCoding.decode( + [$A, $7, $cr, $lf, ...data, $cr, $lf, $0, $cr, $lf, $cr, $lf]), + equals(data)); + }); + + test('parses an empty message', () { + expect(chunkedCoding.decode([$0, $cr, $lf, $cr, $lf]), isEmpty); + }); + + group('disallows a message', () { + test('that ends without any input', () { + expect(() => chunkedCoding.decode([]), throwsFormatException); + }); + + test('that ends after the size', () { + expect(() => chunkedCoding.decode([$a]), throwsFormatException); + }); + + test('that ends after CR', () { + expect(() => chunkedCoding.decode([$a, $cr]), throwsFormatException); + }); + + test('that ends after LF', () { + expect( + () => chunkedCoding.decode([$a, $cr, $lf]), throwsFormatException); + }); + + test('that ends after insufficient bytes', () { + expect(() => chunkedCoding.decode([$a, $cr, $lf, 1, 2, 3]), + throwsFormatException); + }); + + test("that ends after a chunk's bytes", () { + expect(() => chunkedCoding.decode([$1, $cr, $lf, 1]), + throwsFormatException); + }); + + test("that ends after a chunk's CR", () { + expect(() => chunkedCoding.decode([$1, $cr, $lf, 1, $cr]), + throwsFormatException); + }); + + test("that ends atfter a chunk's LF", () { + expect(() => chunkedCoding.decode([$1, $cr, $lf, 1, $cr, $lf]), + throwsFormatException); + }); + + test('that ends after the empty chunk', () { + expect( + () => chunkedCoding.decode([$0, $cr, $lf]), throwsFormatException); + }); + + test('that ends after the closing CR', () { + expect(() => chunkedCoding.decode([$0, $cr, $lf, $cr]), + throwsFormatException); + }); + + test('with a chunk without a size', () { + expect(() => chunkedCoding.decode([$cr, $lf, $0, $cr, $lf, $cr, $lf]), + throwsFormatException); + }); + + test('with a chunk with a non-hex size', () { + expect( + () => chunkedCoding.decode([$q, $cr, $lf, $0, $cr, $lf, $cr, $lf]), + throwsFormatException); + }); + }); + + group('with chunked conversion', () { + late List> results; + late ByteConversionSink sink; + setUp(() { + results = []; + final controller = StreamController>(sync: true); + controller.stream.listen(results.add); + sink = chunkedCoding.decoder.startChunkedConversion(controller.sink); + }); + + test('decodes each chunk of bytes', () { + sink.add([$4, $cr, $lf, 1, 2, 3, 4, $cr, $lf]); + expect( + results, + equals([ + [1, 2, 3, 4] + ])); + + sink.add([$3, $cr, $lf, 5, 6, 7, $cr, $lf]); + expect( + results, + equals([ + [1, 2, 3, 4], + [5, 6, 7] + ])); + + sink.add([$0, $cr, $lf, $cr, $lf]); + sink.close(); + expect( + 
results, + equals([ + [1, 2, 3, 4], + [5, 6, 7] + ])); + }); + + test('handles empty chunks', () { + sink.add([]); + expect(results, isEmpty); + + sink.add([$3, $cr, $lf, 1, 2, 3, $cr, $lf]); + expect( + results, + equals([ + [1, 2, 3] + ])); + + sink.add([]); + expect( + results, + equals([ + [1, 2, 3] + ])); + + sink.add([$0, $cr, $lf, $cr, $lf]); + sink.close(); + expect( + results, + equals([ + [1, 2, 3] + ])); + }); + + test('throws if the sink is closed before the message is done', () { + sink.add([$3, $cr, $lf, 1, 2, 3]); + expect(() => sink.close(), throwsFormatException); + }); + + group('preserves state when a byte array ends', () { + test('within chunk size', () { + sink.add([$a]); + expect(results, isEmpty); + + final data = Iterable.generate(0xA7).toList(); + sink.add([$7, $cr, $lf, ...data]); + expect(results, equals([data])); + }); + + test('after chunk size', () { + sink.add([$3]); + expect(results, isEmpty); + + sink.add([$cr, $lf, 1, 2, 3]); + expect( + results, + equals([ + [1, 2, 3] + ])); + }); + + test('after CR', () { + sink.add([$3, $cr]); + expect(results, isEmpty); + + sink.add([$lf, 1, 2, 3]); + expect( + results, + equals([ + [1, 2, 3] + ])); + }); + + test('after LF', () { + sink.add([$3, $cr, $lf]); + expect(results, isEmpty); + + sink.add([1, 2, 3]); + expect( + results, + equals([ + [1, 2, 3] + ])); + }); + + test('after some bytes', () { + sink.add([$3, $cr, $lf, 1, 2]); + expect( + results, + equals([ + [1, 2] + ])); + + sink.add([3]); + expect( + results, + equals([ + [1, 2], + [3] + ])); + }); + + test('after all bytes', () { + sink.add([$3, $cr, $lf, 1, 2, 3]); + expect( + results, + equals([ + [1, 2, 3] + ])); + + sink.add([$cr, $lf, $3, $cr, $lf, 2, 3, 4, $cr, $lf]); + expect( + results, + equals([ + [1, 2, 3], + [2, 3, 4] + ])); + }); + + test('after a post-chunk CR', () { + sink.add([$3, $cr, $lf, 1, 2, 3, $cr]); + expect( + results, + equals([ + [1, 2, 3] + ])); + + sink.add([$lf, $3, $cr, $lf, 2, 3, 4, $cr, $lf]); + expect( + results, + equals([ + [1, 2, 3], + [2, 3, 4] + ])); + }); + + test('after a post-chunk LF', () { + sink.add([$3, $cr, $lf, 1, 2, 3, $cr, $lf]); + expect( + results, + equals([ + [1, 2, 3] + ])); + + sink.add([$3, $cr, $lf, 2, 3, 4, $cr, $lf]); + expect( + results, + equals([ + [1, 2, 3], + [2, 3, 4] + ])); + }); + + test('after empty chunk size', () { + sink.add([$0]); + expect(results, isEmpty); + + sink.add([$cr, $lf, $cr, $lf]); + expect(results, isEmpty); + + sink.close(); + expect(results, isEmpty); + }); + + test('after first empty chunk CR', () { + sink.add([$0, $cr]); + expect(results, isEmpty); + + sink.add([$lf, $cr, $lf]); + expect(results, isEmpty); + + sink.close(); + expect(results, isEmpty); + }); + + test('after first empty chunk LF', () { + sink.add([$0, $cr, $lf]); + expect(results, isEmpty); + + sink.add([$cr, $lf]); + expect(results, isEmpty); + + sink.close(); + expect(results, isEmpty); + }); + + test('after second empty chunk CR', () { + sink.add([$0, $cr, $lf, $cr]); + expect(results, isEmpty); + + sink.add([$lf]); + expect(results, isEmpty); + + sink.close(); + expect(results, isEmpty); + }); + }); + + group('addSlice()', () { + test('adds bytes from the specified slice', () { + sink.addSlice([1, $3, $cr, $lf, 2, 3, 4, 5], 1, 7, false); + expect( + results, + equals([ + [2, 3, 4] + ])); + }); + + test("doesn't decode if the slice is empty", () { + sink.addSlice([1, 2, 3, 4, 5], 1, 1, false); + expect(results, isEmpty); + }); + + test('closes the sink if isLast is true', () { + sink.addSlice([1, $0, 
$cr, $lf, $cr, $lf, 7], 1, 6, true); + expect(results, isEmpty); + }); + + group('disallows', () { + test('start < 0', () { + expect(() => sink.addSlice([1, 2, 3, 4, 5], -1, 4, false), + throwsRangeError); + }); + + test('start > end', () { + expect(() => sink.addSlice([1, 2, 3, 4, 5], 3, 2, false), + throwsRangeError); + }); + + test('end > length', () { + expect(() => sink.addSlice([1, 2, 3, 4, 5], 1, 10, false), + throwsRangeError); + }); + }); + }); + }); + }); +} diff --git a/pkgs/http_parser/test/example_test.dart b/pkgs/http_parser/test/example_test.dart new file mode 100644 index 0000000000..bbf869f1cb --- /dev/null +++ b/pkgs/http_parser/test/example_test.dart @@ -0,0 +1,22 @@ +import 'dart:io'; + +import 'package:test/test.dart'; + +void main() { + test('validate example', () { + final result = Process.runSync( + Platform.executable, + [ + '--enable-experiment=non-nullable', + 'example/example.dart', + ], + ); + + expect(result.exitCode, 0); + expect(result.stdout, ''' +2014-09-09 09:09:09.000Z +Tue, 09 Sep 2014 09:09:09 GMT +2014-09-09 09:09:09.000Z +'''); + }, testOn: 'vm'); +} diff --git a/pkgs/http_parser/test/http_date_test.dart b/pkgs/http_parser/test/http_date_test.dart new file mode 100644 index 0000000000..d117663f50 --- /dev/null +++ b/pkgs/http_parser/test/http_date_test.dart @@ -0,0 +1,345 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +import 'package:http_parser/http_parser.dart'; +import 'package:test/test.dart'; + +void main() { + group('format', () { + test('many values with 9', () { + final date = DateTime.utc(2014, 9, 9, 9, 9, 9); + final formatted = formatHttpDate(date); + + expect(formatted, 'Tue, 09 Sep 2014 09:09:09 GMT'); + final parsed = parseHttpDate(formatted); + + expect(parsed, date); + }); + + test('end of year', () { + final date = DateTime.utc(1999, 12, 31, 23, 59, 59); + final formatted = formatHttpDate(date); + + expect(formatted, 'Fri, 31 Dec 1999 23:59:59 GMT'); + final parsed = parseHttpDate(formatted); + + expect(parsed, date); + }); + + test('start of year', () { + final date = DateTime.utc(2000); + final formatted = formatHttpDate(date); + + expect(formatted, 'Sat, 01 Jan 2000 00:00:00 GMT'); + final parsed = parseHttpDate(formatted); + + expect(parsed, date); + }); + }); + + group('parse', () { + group('RFC 1123', () { + test('parses the example date', () { + final date = parseHttpDate('Sun, 06 Nov 1994 08:49:37 GMT'); + expect(date.day, equals(6)); + expect(date.month, equals(DateTime.november)); + expect(date.year, equals(1994)); + expect(date.hour, equals(8)); + expect(date.minute, equals(49)); + expect(date.second, equals(37)); + expect(date.timeZoneName, equals('UTC')); + }); + + test('whitespace is required', () { + expect(() => parseHttpDate('Sun,06 Nov 1994 08:49:37 GMT'), + throwsFormatException); + + expect(() => parseHttpDate('Sun, 06Nov 1994 08:49:37 GMT'), + throwsFormatException); + + expect(() => parseHttpDate('Sun, 06 Nov1994 08:49:37 GMT'), + throwsFormatException); + + expect(() => parseHttpDate('Sun, 06 Nov 199408:49:37 GMT'), + throwsFormatException); + + expect(() => parseHttpDate('Sun, 06 Nov 1994 08:49:37GMT'), + throwsFormatException); + }); + + test('exactly one space is required', () { + expect(() => parseHttpDate('Sun, 06 Nov 1994 08:49:37 GMT'), + throwsFormatException); + + expect(() => parseHttpDate('Sun, 06 Nov 1994 08:49:37 GMT'), + 
throwsFormatException); + + expect(() => parseHttpDate('Sun, 06 Nov 1994 08:49:37 GMT'), + throwsFormatException); + + expect(() => parseHttpDate('Sun, 06 Nov 1994 08:49:37 GMT'), + throwsFormatException); + + expect(() => parseHttpDate('Sun, 06 Nov 1994 08:49:37 GMT'), + throwsFormatException); + }); + + test('requires precise number lengths', () { + expect(() => parseHttpDate('Sun, 6 Nov 1994 08:49:37 GMT'), + throwsFormatException); + + expect(() => parseHttpDate('Sun, 06 Nov 94 08:49:37 GMT'), + throwsFormatException); + + expect(() => parseHttpDate('Sun, 06 Nov 1994 8:49:37 GMT'), + throwsFormatException); + + expect(() => parseHttpDate('Sun, 06 Nov 1994 08:9:37 GMT'), + throwsFormatException); + + expect(() => parseHttpDate('Sun, 06 Nov 1994 08:49:7 GMT'), + throwsFormatException); + }); + + test('requires reasonable numbers', () { + expect(() => parseHttpDate('Sun, 00 Nov 1994 08:49:37 GMT'), + throwsFormatException); + + expect(() => parseHttpDate('Sun, 31 Nov 1994 08:49:37 GMT'), + throwsFormatException); + + expect(() => parseHttpDate('Sun, 32 Aug 1994 08:49:37 GMT'), + throwsFormatException); + + expect(() => parseHttpDate('Sun, 06 Nov 1994 24:49:37 GMT'), + throwsFormatException); + + expect(() => parseHttpDate('Sun, 06 Nov 1994 08:60:37 GMT'), + throwsFormatException); + + expect(() => parseHttpDate('Sun, 06 Nov 1994 08:49:60 GMT'), + throwsFormatException); + }); + + test('only allows short weekday names', () { + expect(() => parseHttpDate('Sunday, 6 Nov 1994 08:49:37 GMT'), + throwsFormatException); + }); + + test('only allows short month names', () { + expect(() => parseHttpDate('Sun, 6 November 1994 08:49:37 GMT'), + throwsFormatException); + }); + + test('only allows GMT', () { + expect(() => parseHttpDate('Sun, 6 Nov 1994 08:49:37 PST'), + throwsFormatException); + }); + + test('disallows trailing whitespace', () { + expect(() => parseHttpDate('Sun, 6 Nov 1994 08:49:37 GMT '), + throwsFormatException); + }); + }); + + group('RFC 850', () { + test('parses the example date', () { + final date = parseHttpDate('Sunday, 06-Nov-94 08:49:37 GMT'); + expect(date.day, equals(6)); + expect(date.month, equals(DateTime.november)); + expect(date.year, equals(1994)); + expect(date.hour, equals(8)); + expect(date.minute, equals(49)); + expect(date.second, equals(37)); + expect(date.timeZoneName, equals('UTC')); + }); + + test('whitespace is required', () { + expect(() => parseHttpDate('Sunday,06-Nov-94 08:49:37 GMT'), + throwsFormatException); + + expect(() => parseHttpDate('Sunday, 06-Nov-9408:49:37 GMT'), + throwsFormatException); + + expect(() => parseHttpDate('Sunday, 06-Nov-94 08:49:37GMT'), + throwsFormatException); + }); + + test('exactly one space is required', () { + expect(() => parseHttpDate('Sunday, 06-Nov-94 08:49:37 GMT'), + throwsFormatException); + + expect(() => parseHttpDate('Sunday, 06-Nov-94 08:49:37 GMT'), + throwsFormatException); + + expect(() => parseHttpDate('Sunday, 06-Nov-94 08:49:37 GMT'), + throwsFormatException); + }); + + test('requires precise number lengths', () { + expect(() => parseHttpDate('Sunday, 6-Nov-94 08:49:37 GMT'), + throwsFormatException); + + expect(() => parseHttpDate('Sunday, 06-Nov-1994 08:49:37 GMT'), + throwsFormatException); + + expect(() => parseHttpDate('Sunday, 06-Nov-94 8:49:37 GMT'), + throwsFormatException); + + expect(() => parseHttpDate('Sunday, 06-Nov-94 08:9:37 GMT'), + throwsFormatException); + + expect(() => parseHttpDate('Sunday, 06-Nov-94 08:49:7 GMT'), + throwsFormatException); + }); + + test('requires reasonable 
numbers', () { + expect(() => parseHttpDate('Sunday, 00-Nov-94 08:49:37 GMT'), + throwsFormatException); + + expect(() => parseHttpDate('Sunday, 31-Nov-94 08:49:37 GMT'), + throwsFormatException); + + expect(() => parseHttpDate('Sunday, 32-Aug-94 08:49:37 GMT'), + throwsFormatException); + + expect(() => parseHttpDate('Sunday, 06-Nov-94 24:49:37 GMT'), + throwsFormatException); + + expect(() => parseHttpDate('Sunday, 06-Nov-94 08:60:37 GMT'), + throwsFormatException); + + expect(() => parseHttpDate('Sunday, 06-Nov-94 08:49:60 GMT'), + throwsFormatException); + }); + + test('only allows long weekday names', () { + expect(() => parseHttpDate('Sun, 6-Nov-94 08:49:37 GMT'), + throwsFormatException); + }); + + test('only allows short month names', () { + expect(() => parseHttpDate('Sunday, 6-November-94 08:49:37 GMT'), + throwsFormatException); + }); + + test('only allows GMT', () { + expect(() => parseHttpDate('Sunday, 6-Nov-94 08:49:37 PST'), + throwsFormatException); + }); + + test('disallows trailing whitespace', () { + expect(() => parseHttpDate('Sunday, 6-Nov-94 08:49:37 GMT '), + throwsFormatException); + }); + }); + + group('asctime()', () { + test('parses the example date', () { + final date = parseHttpDate('Sun Nov 6 08:49:37 1994'); + expect(date.day, equals(6)); + expect(date.month, equals(DateTime.november)); + expect(date.year, equals(1994)); + expect(date.hour, equals(8)); + expect(date.minute, equals(49)); + expect(date.second, equals(37)); + expect(date.timeZoneName, equals('UTC')); + }); + + test('parses a date with a two-digit day', () { + final date = parseHttpDate('Sun Nov 16 08:49:37 1994'); + expect(date.day, equals(16)); + expect(date.month, equals(DateTime.november)); + expect(date.year, equals(1994)); + expect(date.hour, equals(8)); + expect(date.minute, equals(49)); + expect(date.second, equals(37)); + expect(date.timeZoneName, equals('UTC')); + }); + + test('whitespace is required', () { + expect(() => parseHttpDate('SunNov 6 08:49:37 1994'), + throwsFormatException); + + expect(() => parseHttpDate('Sun Nov6 08:49:37 1994'), + throwsFormatException); + + expect(() => parseHttpDate('Sun Nov 608:49:37 1994'), + throwsFormatException); + + expect(() => parseHttpDate('Sun Nov 6 08:49:371994'), + throwsFormatException); + }); + + test('the right amount of whitespace is required', () { + expect(() => parseHttpDate('Sun Nov 6 08:49:37 1994'), + throwsFormatException); + + expect(() => parseHttpDate('Sun Nov 6 08:49:37 1994'), + throwsFormatException); + + expect(() => parseHttpDate('Sun Nov 6 08:49:37 1994'), + throwsFormatException); + + expect(() => parseHttpDate('Sun Nov 6 08:49:37 1994'), + throwsFormatException); + + expect(() => parseHttpDate('Sun Nov 6 08:49:37 1994'), + throwsFormatException); + }); + + test('requires precise number lengths', () { + expect(() => parseHttpDate('Sun Nov 016 08:49:37 1994'), + throwsFormatException); + + expect(() => parseHttpDate('Sun Nov 6 8:49:37 1994'), + throwsFormatException); + + expect(() => parseHttpDate('Sun Nov 6 08:9:37 1994'), + throwsFormatException); + + expect(() => parseHttpDate('Sun Nov 6 08:49:7 1994'), + throwsFormatException); + + expect(() => parseHttpDate('Sun Nov 6 08:49:37 94'), + throwsFormatException); + }); + + test('requires reasonable numbers', () { + expect(() => parseHttpDate('Sun Nov 0 08:49:37 1994'), + throwsFormatException); + + expect(() => parseHttpDate('Sun Nov 31 08:49:37 1994'), + throwsFormatException); + + expect(() => parseHttpDate('Sun Aug 32 08:49:37 1994'), + throwsFormatException); + + 
expect(() => parseHttpDate('Sun Nov 6 24:49:37 1994'), + throwsFormatException); + + expect(() => parseHttpDate('Sun Nov 6 08:60:37 1994'), + throwsFormatException); + + expect(() => parseHttpDate('Sun Nov 6 08:49:60 1994'), + throwsFormatException); + }); + + test('only allows short weekday names', () { + expect(() => parseHttpDate('Sunday Nov 0 08:49:37 1994'), + throwsFormatException); + }); + + test('only allows short month names', () { + expect(() => parseHttpDate('Sun November 0 08:49:37 1994'), + throwsFormatException); + }); + + test('disallows trailing whitespace', () { + expect(() => parseHttpDate('Sun November 0 08:49:37 1994 '), + throwsFormatException); + }); + }); + }); +} diff --git a/pkgs/http_parser/test/media_type_test.dart b/pkgs/http_parser/test/media_type_test.dart new file mode 100644 index 0000000000..9a4226c67b --- /dev/null +++ b/pkgs/http_parser/test/media_type_test.dart @@ -0,0 +1,165 @@ +// Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +import 'package:http_parser/http_parser.dart'; +import 'package:test/test.dart'; + +void main() { + group('parse', () { + test('parses a simple MIME type', () { + final type = MediaType.parse('text/plain'); + expect(type.type, equals('text')); + expect(type.subtype, equals('plain')); + }); + + test('allows leading whitespace', () { + expect(MediaType.parse(' text/plain').mimeType, equals('text/plain')); + expect(MediaType.parse('\ttext/plain').mimeType, equals('text/plain')); + }); + + test('allows trailing whitespace', () { + expect(MediaType.parse('text/plain ').mimeType, equals('text/plain')); + expect(MediaType.parse('text/plain\t').mimeType, equals('text/plain')); + }); + + test('disallows separators in the MIME type', () { + expect(() => MediaType.parse('te(xt/plain'), throwsFormatException); + expect(() => MediaType.parse('text/pla=in'), throwsFormatException); + }); + + test('disallows whitespace around the slash', () { + expect(() => MediaType.parse('text /plain'), throwsFormatException); + expect(() => MediaType.parse('text/ plain'), throwsFormatException); + }); + + test('parses parameters', () { + final type = MediaType.parse('text/plain;foo=bar;baz=bang'); + expect(type.mimeType, equals('text/plain')); + expect(type.parameters, equals({'foo': 'bar', 'baz': 'bang'})); + }); + + test('allows whitespace around the semicolon', () { + final type = MediaType.parse('text/plain ; foo=bar ; baz=bang'); + expect(type.mimeType, equals('text/plain')); + expect(type.parameters, equals({'foo': 'bar', 'baz': 'bang'})); + }); + + test('disallows whitespace around the equals', () { + expect( + () => MediaType.parse('text/plain; foo =bar'), throwsFormatException); + expect( + () => MediaType.parse('text/plain; foo= bar'), throwsFormatException); + }); + + test('disallows separators in the parameters', () { + expect( + () => MediaType.parse('text/plain; fo:o=bar'), throwsFormatException); + expect( + () => MediaType.parse('text/plain; foo=b@ar'), throwsFormatException); + }); + + test('parses quoted parameters', () { + final type = + MediaType.parse(r'text/plain; foo="bar space"; baz="bang\\escape"'); + expect(type.mimeType, equals('text/plain')); + expect( + type.parameters, equals({'foo': 'bar space', 'baz': r'bang\escape'})); + }); + + test('lower-cases type and subtype', () { + final type = MediaType.parse('TeXt/pLaIn'); + expect(type.type, equals('text')); + 
expect(type.subtype, equals('plain')); + expect(type.mimeType, equals('text/plain')); + }); + + test('records parameters as case-insensitive', () { + final type = MediaType.parse('test/plain;FoO=bar;bAz=bang'); + expect(type.parameters, equals({'FoO': 'bar', 'bAz': 'bang'})); + expect(type.parameters, containsPair('foo', 'bar')); + expect(type.parameters, containsPair('baz', 'bang')); + }); + }); + + group('change', () { + late MediaType type; + setUp(() { + type = MediaType.parse('text/plain; foo=bar; baz=bang'); + }); + + test('uses the existing fields by default', () { + final newType = type.change(); + expect(newType.type, equals('text')); + expect(newType.subtype, equals('plain')); + expect(newType.parameters, equals({'foo': 'bar', 'baz': 'bang'})); + }); + + test('[type] overrides the existing type', () { + expect(type.change(type: 'new').type, equals('new')); + }); + + test('[subtype] overrides the existing subtype', () { + expect(type.change(subtype: 'new').subtype, equals('new')); + }); + + test('[mimeType] overrides the existing type and subtype', () { + final newType = type.change(mimeType: 'image/png'); + expect(newType.type, equals('image')); + expect(newType.subtype, equals('png')); + }); + + test('[parameters] overrides and adds to existing parameters', () { + expect( + type.change(parameters: {'foo': 'zap', 'qux': 'fblthp'}).parameters, + equals({'foo': 'zap', 'baz': 'bang', 'qux': 'fblthp'})); + }); + + test('[clearParameters] removes existing parameters', () { + expect(type.change(clearParameters: true).parameters, isEmpty); + }); + + test('[clearParameters] with [parameters] removes before adding', () { + final newType = + type.change(parameters: {'foo': 'zap'}, clearParameters: true); + expect(newType.parameters, equals({'foo': 'zap'})); + }); + + test('[type] with [mimeType] is illegal', () { + expect(() => type.change(type: 'new', mimeType: 'image/png'), + throwsArgumentError); + }); + + test('[subtype] with [mimeType] is illegal', () { + expect(() => type.change(subtype: 'new', mimeType: 'image/png'), + throwsArgumentError); + }); + }); + + group('toString', () { + test('serializes a simple MIME type', () { + expect(MediaType('text', 'plain').toString(), equals('text/plain')); + }); + + test('serializes a token parameter as a token', () { + expect(MediaType('text', 'plain', {'foo': 'bar'}).toString(), + equals('text/plain; foo=bar')); + }); + + test('serializes a non-token parameter as a quoted string', () { + expect(MediaType('text', 'plain', {'foo': 'bar baz'}).toString(), + equals('text/plain; foo="bar baz"')); + }); + + test('escapes a quoted string as necessary', () { + expect(MediaType('text', 'plain', {'foo': 'bar"\x7Fbaz'}).toString(), + equals('text/plain; foo="bar\\"\\\x7Fbaz"')); + }); + + test('serializes multiple parameters', () { + expect( + MediaType('text', 'plain', {'foo': 'bar', 'baz': 'bang'}).toString(), + equals('text/plain; foo=bar; baz=bang')); + }); + }); +}
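For reference, here is a minimal usage sketch (not part of the diff) of the package:http_parser APIs these tests exercise — chunkedCoding, parseHttpDate/formatHttpDate, and MediaType. The literal values (the charset parameter, the sample date) are illustrative only; the expected output in the comments follows the behavior asserted in the tests above.

// Usage sketch only; values are illustrative, not part of this change.
import 'package:http_parser/http_parser.dart';

void main() {
  // Chunked transfer coding: chunkedCoding is a Codec between raw bytes and
  // their chunked-transfer encoding, so encode/decode round-trip a payload.
  final encoded = chunkedCoding.encode([1, 2, 3, 4, 5, 6, 7]);
  print(chunkedCoding.decode(encoded)); // [1, 2, 3, 4, 5, 6, 7]

  // HTTP dates: parse any of the accepted formats (RFC 1123, RFC 850,
  // asctime) and format back to the RFC 1123 representation.
  final date = parseHttpDate('Sun, 06 Nov 1994 08:49:37 GMT');
  print(formatHttpDate(date)); // Sun, 06 Nov 1994 08:49:37 GMT

  // Media types: parse a Content-Type-style value, read its parameters
  // (case-insensitively), and derive a modified copy with change().
  final type = MediaType.parse('text/plain; charset=utf-8');
  print(type.mimeType); // text/plain
  print(type.parameters['charset']); // utf-8
  print(type.change(subtype: 'html')); // text/html; charset=utf-8
}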