diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml
deleted file mode 100644
index 167ceb3..0000000
--- a/.github/FUNDING.yml
+++ /dev/null
@@ -1,6 +0,0 @@
-# These are supported funding model platforms
-
-github: [talentlessguy]
-ko_fi: v1rtl
-liberapay: v1rtl
-issuehunt: v1rtl
diff --git a/LICENSE b/LICENSE
deleted file mode 100644
index 31e046f..0000000
--- a/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-MIT License
-
-Copyright (c) 2023 Deno libraries
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/README.md b/README.md
index 1bcecd8..3dc013d 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,3 @@
 # ipfs
 
-Old (forked from ipfs-http-lite) version has been deleted in favor of a rewrite. You can still access it [here](https://deno.land/x/ipfs@0.4.0-wip.3)
-
-🪐🦕 IPFS client for Deno
-
-An [ipfs-http-client](https://github.com/ipfs/js-ipfs/blob/master/packages/ipfs-http-client) port for Deno with fixed deps and reuse of imports from source code.
+Deno republish of the [ipfs-http-client](https://github.com/ipfs/js-ipfs/tree/master/packages/ipfs-http-client) IPFS client.
diff --git a/_vendor/@ipld/dag-json.ts b/_vendor/@ipld/dag-json.ts
deleted file mode 100644
index d75333f..0000000
--- a/_vendor/@ipld/dag-json.ts
+++ /dev/null
@@ -1,207 +0,0 @@
-import { cborgJson, CID, base64 } from '../../deps.ts'
-import { Token, Type } from '../cborg/lib.ts'
-import type { ByteView } from '../multiformats/types.ts'
-import type { ToString } from '../multiformats/link.ts'
-import type { TagDecoder } from '../cborg/interface.ts'
-
-/**
- * cidEncoder will receive all Objects during encode, it needs to filter out
- * anything that's not a CID and return `null` for that so it's encoded as
- * normal. Encoding a CID means replacing it with a `{"/":"<cidString>"}`
- * object as per the DAG-JSON spec.
- *
- */
-function cidEncoder(obj: any): Token[] | null {
-  if (obj.asCID !== obj && obj['/'] !== obj.bytes) {
-    return null // any other kind of object
-  }
-  const cid = CID.asCID(obj)
-  /* c8 ignore next 4 */
-  // very unlikely case, and it'll probably throw a recursion error in cborg
-  if (!cid) {
-    return null
-  }
-  const cidString = cid.toString()
-
-  return [
-    new Token(Type.map, Infinity, 1),
-    new Token(Type.string, '/', 1), // key
-    new Token(Type.string, cidString, cidString.length), // value
-    new Token(Type.break, undefined, 1)
-  ]
-}
-
-/**
- * bytesEncoder will receive all Uint8Arrays (and friends) during encode, it
- * needs to replace it with a `{"/":{"bytes":"Base64ByteString"}}` object as
- * per the DAG-JSON spec.
- */
-function bytesEncoder(bytes: Uint8Array): Token[] | null {
-  const bytesString = base64.encode(bytes).slice(1) // no mbase prefix
-  return [
-    new Token(Type.map, Infinity, 1),
-    new Token(Type.string, '/', 1), // key
-    new Token(Type.map, Infinity, 1), // value
-    new Token(Type.string, 'bytes', 5), // inner key
-    new Token(Type.string, bytesString, bytesString.length), // inner value
-    new Token(Type.break, undefined, 1),
-    new Token(Type.break, undefined, 1)
-  ]
-}
-
-/**
- * Intercept all `undefined` values from an object walk and reject the entire
- * object if we find one.
- */
-function undefinedEncoder(): null {
-  throw new Error('`undefined` is not supported by the IPLD Data Model and cannot be encoded')
-}
-
-/**
- * Intercept all `number` values from an object walk and reject the entire
- * object if we find something that doesn't fit the IPLD data model (NaN &
- * Infinity).
- */
-function numberEncoder(num: number): null {
-  if (Number.isNaN(num)) {
-    throw new Error('`NaN` is not supported by the IPLD Data Model and cannot be encoded')
-  }
-  if (num === Infinity || num === -Infinity) {
-    throw new Error('`Infinity` and `-Infinity` is not supported by the IPLD Data Model and cannot be encoded')
-  }
-  return null // process with standard number encoder
-}
-
-const encodeOptions = {
-  typeEncoders: {
-    Object: cidEncoder,
-    Uint8Array: bytesEncoder, // TODO: all the typedarrays
-    Buffer: bytesEncoder, // TODO: all the typedarrays
-    undefined: undefinedEncoder,
-    number: numberEncoder
-  }
-}
-
-/**
- * @implements {DecodeTokenizer}
- */
-class DagJsonTokenizer extends cborgJson.Tokenizer {
-  tokenBuffer: Token[]
-
-  constructor(data: Uint8Array, options: Record<string, unknown>) {
-    super(data, options)
-    this.tokenBuffer = []
-  }
-
-  done(): boolean {
-    return this.tokenBuffer.length === 0 && super.done()
-  }
-
-  _next(): Token {
-    if (this.tokenBuffer.length > 0) {
-      // @ts-ignore https://github.com/Microsoft/TypeScript/issues/30406
-      return this.tokenBuffer.pop()
-    }
-    return super.next()
-  }
-
-  /**
-   * Implements rules outlined in https://github.com/ipld/specs/pull/356
-   */
-  next(): Token {
-    const token = this._next()
-
-    if (token.type === Type.map) {
-      const keyToken = this._next()
-      if (keyToken.type === Type.string && keyToken.value === '/') {
-        const valueToken = this._next()
-        if (valueToken.type === Type.string) {
-          // *must* be a CID
-          const breakToken = this._next() // swallow the end-of-map token
-          if (breakToken.type !== Type.break) {
-            throw new Error('Invalid encoded CID form')
-          }
-          this.tokenBuffer.push(valueToken) // CID.parse will pick this up after our tag token
-          return new Token(Type.tag, 42, 0)
-        }
-        if (valueToken.type === Type.map) {
-          const innerKeyToken = this._next()
-          if (innerKeyToken.type === Type.string && innerKeyToken.value === 'bytes') {
-            const innerValueToken = this._next()
-            if (innerValueToken.type === Type.string) {
-              // *must* be Bytes
-              for (let i = 0; i < 2; i++) {
-                const breakToken = this._next() // swallow two end-of-map tokens
-                if (breakToken.type !== Type.break) {
-                  throw new Error('Invalid encoded Bytes form')
-                }
-              }
-              const bytes = base64.decode(`m${innerValueToken.value}`)
-              return new Token(Type.bytes, bytes, innerValueToken.value.length)
-            }
-            this.tokenBuffer.push(innerValueToken) // bail
-          }
-          this.tokenBuffer.push(innerKeyToken) // bail
-        }
-        this.tokenBuffer.push(valueToken) // bail
-      }
-      this.tokenBuffer.push(keyToken) // bail
-    }
-    return token
-  }
-}
-
-const decodeOptions = {
-  allowIndefinite: false,
-  allowUndefined: false,
-  allowNaN: false,
-  allowInfinity: false,
-  allowBigInt: true, // this will lead to BigInt for ints outside of
-  // safe-integer range, which may surprise users
-  strict: true,
-  useMaps: false,
-  rejectDuplicateMapKeys: true,
-  tags: [] as TagDecoder[]
-}
-
-// we're going to get TAG(42)STRING("bafy...") from the tokenizer so we only need
-// to deal with the STRING("bafy...") at this point
-decodeOptions.tags[42] = CID.parse
-
-export const name = 'dag-json'
-export const code = 0x0129
-
-/**
- * @template T
- * @param {T} node
- * @returns {ByteView<T>}
- */
-export const encode = <T>(node: T): ByteView<T> => cborgJson.encode(node, encodeOptions)
-
-/**
- * @template T
- * @param {ByteView<T>} data
- * @returns {T}
- */
-export const decode = <T>(data: ByteView<T>): T => {
-  // the tokenizer is stateful so we need a single instance of it
-  const options = Object.assign(decodeOptions, { tokenizer: new DagJsonTokenizer(data, decodeOptions) })
-  return cborgJson.decode(data, options)
-}
-
-/**
- * @template T
- * @param {T} node
- * @returns {ToString<T>}
- */
-export const format = <T>(node: T): ToString<T> => utf8Decoder.decode(encode(node))
-export { format as stringify }
-const utf8Decoder = new TextDecoder()
-
-/**
- * @template T
- * @param {ToString<T>} data
- * @returns {T}
- */
-export const parse = <T>(data: ToString<T>): T => decode(utf8Encoder.encode(data))
-const utf8Encoder = new TextEncoder()
diff --git a/_vendor/cborg/bl.ts b/_vendor/cborg/bl.ts
deleted file mode 100644
index 012b37e..0000000
--- a/_vendor/cborg/bl.ts
+++ /dev/null
@@ -1,119 +0,0 @@
-// deno-lint-ignore-file ban-ts-comment
-/**
- * Bl is a list of byte chunks, similar to https://github.com/rvagg/bl but for
- * writing rather than reading.
- * A Bl object accepts set() operations for individual bytes and copyTo() for
- * inserting byte arrays. These write operations don't automatically increment
- * the internal cursor so its "length" won't be changed. Instead, increment()
- * must be called to extend its length to cover the inserted data.
- * The toBytes() call will convert all internal memory to a single Uint8Array of
- * the correct length, truncating any data that is stored but hasn't been
- * included by an increment().
- * get() can retrieve a single byte.
- * All operations (except toBytes()) take an "offset" argument that will perform
- * the write at the offset _from the current cursor_. For most operations this
- * will be `0` to write at the current cursor position but it can be ahead of
- * the current cursor. Negative offsets probably work but are untested.
- */
-
-// TODO: ipjs doesn't support this, only for test files: https://github.com/mikeal/ipjs/blob/master/src/package/testFile.js#L39
-import { alloc, concat, slice } from './byte-utils.ts'
-
-// the ts-ignores in this file are almost all for the `Uint8Array|number[]` duality that exists
-// for perf reasons. Consider better approaches to this or removing it entirely, it is quite
-// risky because of some assumptions about small chunks === number[] and everything else === Uint8Array.
-
-const defaultChunkSize = 256
-
-export class Bl {
-  chunkSize: number
-  cursor: number
-  maxCursor: number
-  chunks: (Uint8Array | number[])[]
-  _initReuseChunk: Uint8Array | number[] | null
-
-  constructor(chunkSize: number = defaultChunkSize) {
-    this.chunkSize = chunkSize
-    this.cursor = 0
-    this.maxCursor = -1
-
-    this.chunks = []
-    // keep the first chunk around if we can to save allocations for future encodes
-
-    this._initReuseChunk = null
-  }
-
-  reset() {
-    this.cursor = 0
-    this.maxCursor = -1
-    if (this.chunks.length) {
-      this.chunks = []
-    }
-    if (this._initReuseChunk !== null) {
-      this.chunks.push(this._initReuseChunk)
-      this.maxCursor = this._initReuseChunk.length - 1
-    }
-  }
-
-  push(bytes: Uint8Array | number[]) {
-    let topChunk = this.chunks[this.chunks.length - 1]
-    const newMax = this.cursor + bytes.length
-    if (newMax <= this.maxCursor + 1) {
-      // we have at least one chunk and we can fit these bytes into that chunk
-      const chunkPos = topChunk.length - (this.maxCursor - this.cursor) - 1
-      // @ts-ignore
-      topChunk.set(bytes, chunkPos)
-    } else {
-      // can't fit it in
-      if (topChunk) {
-        // trip the last chunk to `cursor` if we need to
-        const chunkPos = topChunk.length - (this.maxCursor - this.cursor) - 1
-        if (chunkPos < topChunk.length) {
-          // @ts-ignore
-          this.chunks[this.chunks.length - 1] = topChunk.subarray(0, chunkPos)
-          this.maxCursor = this.cursor - 1
-        }
-      }
-      if (bytes.length < 64 && bytes.length < this.chunkSize) {
-        // make a new chunk and copy the new one into it
-        topChunk = alloc(this.chunkSize)
-        this.chunks.push(topChunk)
-        this.maxCursor += topChunk.length
-        if (this._initReuseChunk === null) {
-          this._initReuseChunk = topChunk
-        }
-        // @ts-ignore
-        topChunk.set(bytes, 0)
-      } else {
-        // push the new bytes in as its own chunk
-        this.chunks.push(bytes)
-        this.maxCursor += bytes.length
-      }
-    }
-    this.cursor += bytes.length
-  }
-
-  toBytes(reset = false): Uint8Array {
-    let byts
-    if (this.chunks.length === 1) {
-      const chunk = this.chunks[0]
-      if (reset && this.cursor > chunk.length / 2) {
-        /* c8 ignore next 2 */
-        // @ts-ignore
-        byts = this.cursor === chunk.length ? chunk : chunk.subarray(0, this.cursor)
-        this._initReuseChunk = null
-        this.chunks = []
-      } else {
-        // @ts-ignore
-        byts = slice(chunk, 0, this.cursor)
-      }
-    } else {
-      // @ts-ignore
-      byts = concat(this.chunks, this.cursor)
-    }
-    if (reset) {
-      this.reset()
-    }
-    return byts
-  }
-}
diff --git a/_vendor/cborg/byte-utils.ts b/_vendor/cborg/byte-utils.ts
deleted file mode 100644
index 09e6ccd..0000000
--- a/_vendor/cborg/byte-utils.ts
+++ /dev/null
@@ -1,42 +0,0 @@
-export const slice =
-  /**
-   * @param {Uint8Array} bytes
-   * @param {number} start
-   * @param {number} end
-   */
-  (bytes: Uint8Array, start: number, end: number) => {
-    return bytes.slice(start, end)
-  }
-
-export const concat =
-  /* c8 ignore next 19 */
-  // eslint-disable-line operator-linebreak
-  /**
-   * @param {Uint8Array[]} chunks
-   * @param {number} length
-   * @returns {Uint8Array}
-   */
-  (chunks: Uint8Array[], length: number): Uint8Array => {
-    const out = new Uint8Array(length)
-    let off = 0
-    for (let b of chunks) {
-      if (off + b.length > out.length) {
-        // final chunk that's bigger than we need
-        b = b.subarray(0, out.length - off)
-      }
-      out.set(b, off)
-      off += b.length
-    }
-    return out
-  }
-
-export const alloc =
-  /* c8 ignore next 8 */
-  // eslint-disable-line operator-linebreak
-  /**
-   * @param {number} size
-   * @returns {Uint8Array}
-   */
-  (size: number): Uint8Array => {
-    return new Uint8Array(size)
-  }
diff --git a/_vendor/cborg/interface.ts b/_vendor/cborg/interface.ts
deleted file mode 100644
index 25b24e9..0000000
--- a/_vendor/cborg/interface.ts
+++ /dev/null
@@ -1,66 +0,0 @@
-// deno-lint-ignore-file
-import { Token } from './lib.ts'
-import { Bl } from './bl.ts'
-
-export type TokenOrNestedTokens = Token | Token[] | TokenOrNestedTokens[]
-
-export interface Reference {
-  parent: Reference | undefined
-  obj: unknown | unknown[]
-  includes(obj: unknown | unknown[]): boolean
-}
-
-export type OptionalTypeEncoder = (
-  data: unknown,
-  typ: string,
-  options: EncodeOptions,
-  refStack?: Reference
-) => TokenOrNestedTokens | null
-
-export type StrictTypeEncoder = (
-  data: unknown,
-  typ: string,
-  options: EncodeOptions,
-  refStack?: Reference
-) => TokenOrNestedTokens
-
-export type TokenTypeEncoder = {
-  (buf: Bl, token: Token, options?: EncodeOptions): void
-  compareTokens(t1: Token, t2: Token): number
-  // TODO: make this non-optional as a breaking change and remove the throw in length.js
-  encodedSize?(token: Token, options?: EncodeOptions): number
-}
-
-export type MapSorter = (e1: (Token | Token[])[], e2: (Token | Token[])[]) => number
-
-export type QuickEncodeToken = (token: Token) => Uint8Array | undefined
-
-export interface DecodeTokenizer {
-  done(): boolean
-  next(): Token
-}
-
-export type TagDecoder = (inner: any) => any
-
-export interface DecodeOptions {
-  allowIndefinite?: boolean
-  allowUndefined?: boolean
-  coerceUndefinedToNull?: boolean
-  allowInfinity?: boolean
-  allowNaN?: boolean
-  allowBigInt?: boolean
-  strict?: boolean
-  useMaps?: boolean
-  rejectDuplicateMapKeys?: boolean
-  retainStringBytes?: boolean
-  tags?: TagDecoder[]
-  tokenizer?: DecodeTokenizer
-}
-
-export interface EncodeOptions {
-  float64?: boolean
-  addBreakTokens?: boolean
-  mapSorter?: MapSorter
-  quickEncodeToken?: QuickEncodeToken
-  typeEncoders?: { [typeName: string]: OptionalTypeEncoder }
-}
diff --git a/_vendor/cborg/lib.ts b/_vendor/cborg/lib.ts
deleted file mode 100644
index f389b15..0000000
--- a/_vendor/cborg/lib.ts
+++ /dev/null
@@ -1,72 +0,0 @@
-// deno-lint-ignore-file no-explicit-any
-class Type {
-  static uint: Type
-  major: number
-  majorEncoded: number
-  name: string
-  terminal: boolean
-  static negint: Type
-  static bytes: Type
-  static string: Type
-  static array: Type
-  static map: Type
-  static tag: Type
-  static float: Type
-  static false: Type
-  static true: Type
-  static null: Type
-  static undefined: Type
-  static break: Type
-
-  constructor(major: number, name: string, terminal: boolean) {
-    this.major = major
-    this.majorEncoded = major << 5
-    this.name = name
-    this.terminal = terminal
-  }
-  toString() {
-    return `Type[${this.major}].${this.name}`
-  }
-  /**
-   * @param {Type} typ
-   * @returns {number}
-   */
-  compare(typ: Type): number {
-    return this.major < typ.major ? -1 : this.major > typ.major ? 1 : 0
-  }
-}
-// convert to static fields when better supported
-Type.uint = new Type(0, 'uint', true)
-Type.negint = new Type(1, 'negint', true)
-Type.bytes = new Type(2, 'bytes', true)
-Type.string = new Type(3, 'string', true)
-Type.array = new Type(4, 'array', false)
-Type.map = new Type(5, 'map', false)
-Type.tag = new Type(6, 'tag', false) // terminal?
-Type.float = new Type(7, 'float', true)
-Type.false = new Type(7, 'false', true)
-Type.true = new Type(7, 'true', true)
-Type.null = new Type(7, 'null', true)
-Type.undefined = new Type(7, 'undefined', true)
-Type.break = new Type(7, 'break', true)
-
-class Token {
-  type: Type
-  value: any
-  encodedLength: number
-  encodedBytes: Uint8Array | undefined
-  byteValue: Uint8Array | undefined
-
-  constructor(type: Type, value: any, encodedLength: number) {
-    this.type = type
-    this.value = value
-    this.encodedLength = encodedLength
-    this.encodedBytes = undefined
-    this.byteValue = undefined
-  }
-  /* c8 ignore next 3 */
-  toString() {
-    return `Token[${this.type}].${this.value}`
-  }
-}
-export { Type, Token }
diff --git a/_vendor/multiformats/basics.ts b/_vendor/multiformats/basics.ts
deleted file mode 100644
index e7bc543..0000000
--- a/_vendor/multiformats/basics.ts
+++ /dev/null
@@ -1,9 +0,0 @@
-import { identity, base32, base64, sha2, raw, json, CID } from '../../deps.ts'
-
-export const hashes = { ...sha2, ...identity }
-
-export const bases = { ...base32, ...base64 }
-
-export const codecs = { raw, json }
-
-export { CID }
diff --git a/_vendor/multiformats/link.ts b/_vendor/multiformats/link.ts
deleted file mode 100644
index b258c98..0000000
--- a/_vendor/multiformats/link.ts
+++ /dev/null
@@ -1,53 +0,0 @@
-import { BlockEncoder, BlockDecoder, Phantom, MultihashDigest, ByteView, MultibaseEncoder, Multibase } from './types.ts'
-
-/**
- * An IPLD codec is a combination of both encoder and decoder.
- */
-export interface BlockCodec<Code extends number, T> extends BlockEncoder<Code, T>, BlockDecoder<Code, T> {}
-
-export type Version = 0 | 1
-
-export type DAG_PB = 0x70
-export type SHA_256 = 0x12
-
-/**
- * Represents an IPLD link to a specific data of type `T`.
- *
- * @template T - Logical type of the data being linked to.
- * @template C - multicodec code corresponding to a codec linked data is encoded with
- * @template A - multicodec code corresponding to the hashing algorithm of the CID
- * @template V - CID version
- */
-export interface Link<
-  Data extends unknown = unknown,
-  Format extends number = number,
-  Alg extends number = number,
-  V extends Version = 1
-> extends Phantom<Data> {
-  readonly version: V
-  readonly code: Format
-  readonly multihash: MultihashDigest<Alg>
-
-  readonly byteOffset: number
-  readonly byteLength: number
-  readonly bytes: ByteView<Link<Data, Format, Alg, V>>
-
-  equals: (other: unknown) => other is Link<Data, Format, Alg, V>
-
-  toString: <Prefix extends string>(
-    base?: MultibaseEncoder<Prefix>
-  ) => ToString<Link<Data, Format, Alg, V>, Prefix>
-  link: () => Link<Data, Format, Alg, V>
-
-  toV1: () => Link<Data, Format, Alg, 1>
-}
-
-export interface LinkJSON<T extends UnknownLink = UnknownLink> {
-  '/': ToString<T>
-}
-
-export interface LegacyLink<T extends unknown = unknown> extends Link<T, DAG_PB, SHA_256, 0> {}
-
-export type UnknownLink = LegacyLink | Link<unknown, number, number, Version>
-
-export type ToString<T, Prefix extends string = string> = Multibase<Prefix> & Phantom<T>
diff --git a/_vendor/multiformats/types.ts b/_vendor/multiformats/types.ts
deleted file mode 100644
index abedf52..0000000
--- a/_vendor/multiformats/types.ts
+++ /dev/null
@@ -1,63 +0,0 @@
-import type {
-  MultibaseCodec,
-  Multibase,
-  MultibaseEncoder,
-  MultibaseDecoder
-} from 'https://esm.sh/multiformats@11.0.0/src/bases/interface.ts'
-import type { MultihashHasher, MultihashDigest } from 'https://esm.sh/multiformats@11.0.0/src/hashes/interface.ts'
-/**
- * A byte-encoded representation of some type of `Data`.
- *
- * A `ByteView` is essentially a `Uint8Array` that's been "tagged" with
- * a `Data` type parameter indicating the type of encoded data.
- *
- * For example, a `ByteView<{ hello: "world" }>` is a `Uint8Array` containing a
- * binary representation of a `{hello: "world"}`.
- */
-export interface ByteView<Data> extends Uint8Array, Phantom<Data> {}
-
-declare const Marker: unique symbol
-
-/**
- * A utility type to retain an unused type parameter `T`.
- * Similar to [phantom type parameters in Rust](https://doc.rust-lang.org/rust-by-example/generics/phantom.html).
- *
- * Capturing unused type parameters allows us to define "nominal types," which
- * TypeScript does not natively support. Nominal types in turn allow us to capture
- * semantics not represented in the actual type structure, without requiring us to define
- * new classes or pay additional runtime costs.
- *
- * For a concrete example, see {@link ByteView}, which extends the `Uint8Array` type to capture
- * type information about the structure of the data encoded into the array.
- */
-export interface Phantom<T> {
-  // This field can not be represented because field name is non-existent
-  // unique symbol. But given that field is optional any object will valid
-  // type constraint.
-  [Marker]?: T
-}
-
-/**
- * IPLD encoder part of the codec.
- */
-export interface BlockEncoder<Code extends number, T> {
-  name: string
-  code: Code
-  encode: (data: T) => ByteView<T>
-}
-
-/**
- * IPLD decoder part of the codec.
- */
-export interface BlockDecoder<Code extends number, T> {
-  code: Code
-  decode: (bytes: ByteView<T>) => T
-}
-
-/**
- * An IPLD codec is a combination of both encoder and decoder.
- */
-export interface BlockCodec<Code extends number, T> extends BlockEncoder<Code, T>, BlockDecoder<Code, T> {}
-
-export type { MultihashHasher, MultihashDigest, MultibaseEncoder, MultibaseDecoder }
-export type { MultibaseCodec, Multibase }
diff --git a/deps.ts b/deps.ts
deleted file mode 100644
index 6d152b9..0000000
--- a/deps.ts
+++ /dev/null
@@ -1,18 +0,0 @@
-export * as dagPB from 'https://esm.sh/@ipld/dag-pb@4.0.0'
-export * as dagCBOR from 'https://esm.sh/@ipld/dag-cbor@9.0.0'
-export { Multibases } from 'https://esm.sh/ipfs-core-utils@0.18.0/multibases'
-export { Multicodecs } from 'https://esm.sh/ipfs-core-utils@0.18.0/multicodecs'
-export { Multihashes } from 'https://esm.sh/ipfs-core-utils@0.18.0/multihashes'
-export * as cborg from 'https://esm.sh/cborg@1.10.0'
-export * as cborgJson from 'https://esm.sh/cborg@1.10.0/json'
-export { base32 } from 'https://esm.sh/multiformats@11.0.0/bases/base32'
-export { base58btc } from 'https://esm.sh/multiformats@11.0.0/bases/base58'
-export { base64 } from 'https://esm.sh/multiformats@11.0.0/bases/base64'
-export { coerce } from 'https://esm.sh/multiformats@11.0.0/bytes'
-export { CID } from 'https://esm.sh/multiformats@11.0.0/cid'
-export { identity } from 'https://esm.sh/multiformats@11.0.0/hashes/identity'
-export * as sha2 from 'https://esm.sh/multiformats@11.0.0/hashes/sha2'
-export * as raw from 'https://esm.sh/multiformats@11.0.0/codecs/raw'
-export * as json from 'https://esm.sh/multiformats@11.0.0/codecs/json'
-export { default as globSourceImport } from 'https://esm.sh/ipfs-utils@9.0.14/src/files/glob-source.js'
-export { default as urlSource } from 'https://esm.sh/ipfs-utils@9.0.14/src/files/url-source.js'
diff --git a/example.ts b/example.ts
new file mode 100644
index 0000000..f5b5358
--- /dev/null
+++ b/example.ts
@@ -0,0 +1,7 @@
+import { create } from './mod.ts'
+
+const client = create()
+
+const { cid } = await client.add('Hello World!')
+
+console.log(cid)
diff --git a/mod.ts b/mod.ts
index c7581ce..3c71779 100644
--- a/mod.ts
+++ b/mod.ts
@@ -1,142 +1 @@
-import { dagCBOR, dagPB, Multibases, Multicodecs, Multihashes, identity, globSourceImport } from './deps.ts'
-import * as dagJSON from './_vendor/@ipld/dag-json.ts'
-import { bases, hashes, codecs } from './_vendor/multiformats/basics.ts'
-import type { Options, LoadBaseFn, LoadCodecFn, LoadHasherFn, IPLDOptions } from './types.ts'
-import { createBitswap } from './bitswap/index.js'
-import { createBlock } from './block/index.js'
-import { createBootstrap } from './bootstrap/index.js'
-import { createConfig } from './config/index.js'
-import { createDag } from './dag/index.js'
-import { createDht } from './dht/index.js'
-import { createDiag } from './diag/index.js'
-import { createFiles } from './files/index.js'
-import { createKey } from './key/index.js'
-import { createLog } from './log/index.js'
-import { createName } from './name/index.js'
-import { createObject } from './object/index.js'
-import { createPin } from './pin/index.js'
-import { createPubsub } from './pubsub/index.js'
-import { createRefs } from './refs/index.js'
-import { createRepo } from './repo/index.js'
-import { createStats } from './stats/index.js'
-import { createSwarm } from './swarm/index.js'
-import { createAdd } from './add.js'
-import { createAddAll } from './add-all.js'
-import { createCat } from './cat.js'
-import { createCommands } from './commands.js'
-import { createDns } from './dns.js'
-import { createGetEndpointConfig } from './get-endpoint-config.js'
-import { createGet } from './get.js'
-import { createId } from './id.js'
-import { createIsOnline } from './is-online.js'
-import { createLs } from './ls.js'
-import { createMount } from './mount.js'
-import { createPing } from './ping.js'
-import { createResolve } from './resolve.js'
-import { createStart } from './start.js'
-import { createStop } from './stop.js'
-import { createVersion } from './version.js'
-import {
-  MultibaseCodec as _MultibaseCodec,
-  BlockCodec as _BlockCodec,
-  MultihashHasher
-} from './_vendor/multiformats/types.ts'
-
-type BlockCodec = _BlockCodec<number, any>
-type MultibaseCodec = _MultibaseCodec<string>
-
-/**
- * @typedef {import('./types').HTTPClientExtraOptions} HTTPClientExtraOptions
- * @typedef {import('./types').EndpointConfig} EndpointConfig
- * @typedef {import('./types').IPFSHTTPClient} IPFSHTTPClient
- */
-
-/**
- * @param {Options} options
- */
-export function create(options: Options = {}) {
-  const id: BlockCodec = {
-    name: identity.name,
-    code: identity.code,
-    encode: (id) => id,
-    decode: (id) => id
-  }
-
-  const multibaseCodecs: MultibaseCodec[] = Object.values(bases)
-
-  ;(options.ipld && options.ipld.bases ? options.ipld.bases : []).forEach((base) => multibaseCodecs.push(base))
-
-  const multibases = new Multibases({
-    bases: multibaseCodecs,
-    loadBase: options.ipld && options.ipld.loadBase
-  })
-
-  /** @type {BlockCodec[]} */
-  const blockCodecs: BlockCodec[] = Object.values(codecs)
-
-  ;[dagPB, dagCBOR, dagJSON, dagJOSE, id]
-    .concat((options.ipld && options.ipld.codecs) || [])
-    .forEach((codec) => blockCodecs.push(codec))
-
-  const multicodecs = new Multicodecs({
-    codecs: blockCodecs,
-    loadCodec: options.ipld && options.ipld.loadCodec
-  })
-
-  /** @type {MultihashHasher[]} */
-  const multihashHashers: MultihashHasher[] = Object.values(hashes)
-
-  ;(options.ipld && options.ipld.hashers ? options.ipld.hashers : []).forEach((hasher) => multihashHashers.push(hasher))
-
-  const multihashes = new Multihashes({
-    hashers: multihashHashers,
-    loadHasher: options.ipld && options.ipld.loadHasher
-  })
-
-  /** @type {IPFSHTTPClient} */
-  const client: IPFSHTTPClient = {
-    add: createAdd(options),
-    addAll: createAddAll(options),
-    bitswap: createBitswap(options),
-    block: createBlock(options),
-    bootstrap: createBootstrap(options),
-    cat: createCat(options),
-    commands: createCommands(options),
-    config: createConfig(options),
-    dag: createDag(multicodecs, options),
-    dht: createDht(options),
-    diag: createDiag(options),
-    dns: createDns(options),
-    files: createFiles(options),
-    get: createGet(options),
-    getEndpointConfig: createGetEndpointConfig(options),
-    id: createId(options),
-    isOnline: createIsOnline(options),
-    key: createKey(options),
-    log: createLog(options),
-    ls: createLs(options),
-    mount: createMount(options),
-    name: createName(options),
-    object: createObject(multicodecs, options),
-    pin: createPin(options),
-    ping: createPing(options),
-    pubsub: createPubsub(options),
-    refs: createRefs(options),
-    repo: createRepo(options),
-    resolve: createResolve(options),
-    start: createStart(options),
-    stats: createStats(options),
-    stop: createStop(options),
-    swarm: createSwarm(options),
-    version: createVersion(options),
-    bases: multibases,
-    codecs: multicodecs,
-    hashers: multihashes
-  }
-
-  return client
-}
-
-export { CID, urlSource } from './deps.ts'
-export { multiaddr } from '@multiformats/multiaddr'
-export const globSource = globSourceImport
+export * from 'npm:ipfs-http-client'
diff --git a/types.ts b/types.ts
deleted file mode 100644
index 28f4fac..0000000
--- a/types.ts
+++ /dev/null
@@ -1,32 +0,0 @@
-import type { Multiaddr } from 'https://esm.sh/@multiformats/multiaddr@11.1.5'
-import { BlockCodec, MultibaseCodec, MultihashHasher } from './_vendor/multiformats/types.ts'
-
-export interface IPLDOptions {
-  loadBase: LoadBaseFn
-  loadCodec: LoadCodecFn
-  loadHasher: LoadHasherFn
-  bases: Array<MultibaseCodec<string>>
-  codecs: Array<BlockCodec<number, any>>
-  hashers: MultihashHasher[]
-}
-
-export interface LoadBaseFn {
-  (codeOrName: number | string): Promise<MultibaseCodec<string>>
-}
-export interface LoadCodecFn {
-  (codeOrName: number | string): Promise<BlockCodec<number, any>>
-}
-export interface LoadHasherFn {
-  (codeOrName: number | string): Promise<MultihashHasher>
-}
-
-export interface Options {
-  host?: string
-  port?: number
-  protocol?: string
-  headers?: Headers | Record<string, string>
-  timeout?: number | string
-  apiPath?: string
-  url?: URL | string | Multiaddr
-  ipld?: Partial<IPLDOptions>
-}
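
For quick reference, a minimal usage sketch of the republished client after this change — assuming a locally running IPFS daemon serving the HTTP API at its default address (http://127.0.0.1:5001) and `deno run --allow-net`. `create({ url })`, `add()` and `cat()` come from the upstream ipfs-http-client API that mod.ts now re-exports; the explicit URL and file name below are illustrative, not required.

// usage-sketch.ts — add a string, then read it back over the daemon's HTTP API
import { create } from './mod.ts'

// create({ url }) targets an explicit daemon endpoint; omit the option to use the default
const client = create({ url: 'http://127.0.0.1:5001/api/v0' })

const { cid } = await client.add('Hello World!')

// cat() yields Uint8Array chunks; decode them back into a string
const decoder = new TextDecoder()
let content = ''
for await (const chunk of client.cat(cid)) {
  content += decoder.decode(chunk, { stream: true })
}

console.log(cid.toString(), '->', content)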