diff --git a/.gitignore b/.gitignore index 6427dbe..22972d2 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,8 @@ +.env test/logs .DS_Store .vscode node_modules bun.lockb - +claude_chats +*.jsonl \ No newline at end of file diff --git a/README.md b/README.md index 7ed2023..700ed6c 100644 --- a/README.md +++ b/README.md @@ -14,11 +14,10 @@ the following features from the specification with the goal to be feature comple | DONE | Ability to resolve the full history of the DID | Uses a verifiable chain of updates from genesis to deactivation. | | DONE | A self-certifying identifier (SCID) for the DID | Ensures global uniqueness, derived from the initial DIDDoc for portability. | | DONE | DIDDoc updates include a proof signed by the DID Controller(s) | Proof required for updates, authorized by the DID Controller(s). | -| TODO | Optional mechanism for publishing “pre-rotation” keys | Helps prevent loss of control if an active private key is compromised. | +| DONE | Optional mechanism for publishing "pre-rotation" keys | Helps prevent loss of control if an active private key is compromised. | | TODO | DID URL path handling | Defaults to resolve /path/to/file by DID-to-HTTPS translation, can be overridden. | | TODO | A DID URL path /whois | Automatically returns a Verifiable Presentation, if published by the DID controller. | - ## Prerequisites Install [bun.sh](https://bun.sh/) @@ -33,20 +32,148 @@ curl -fsSL https://bun.sh/install | bash bun install ``` -## Run all tests +## Available Commands + +The following commands are defined in the `package.json` file: + +1. `dev`: Run the resolver in development mode with debugging enabled. + ```bash + bun run dev + ``` + This command runs: `bun --watch --inspect-wait ./src/resolver.ts` + +2. `server`: Run the resolver in watch mode for development. + ```bash + bun run server + ``` + This command runs: `bun --watch ./src/resolver.ts` + +3. `test`: Run all tests. 
+ ```bash + bun run test + ``` + This command runs: `bun test` + +4. `test:watch`: Run tests in watch mode, focusing on witness tests. + ```bash + bun run test:watch + ``` + This command runs: `bun test --watch witness` + +5. `test:bail`: Run tests in watch mode, stopping on the first failure with verbose output. + ```bash + bun run test:bail + ``` + This command runs: `bun test --watch --bail --verbose` + +6. `test:log`: Run tests and save the output to a log file. + ```bash + bun run test:log + ``` + This command runs: `mkdir -p ./test/logs && LOG_RESOLVES=true bun test &> ./test/logs/test-run.txt` + +7. `cli`: Run the CLI tool. + ```bash + bun run cli [command] [options] + ``` + This command runs: `bun run src/cli.ts --` + +## CLI Documentation -```bash -bun test ``` +The CLI is Experimental, buggy and beta software -- use at your own risk! +``` + +The trustdidweb-ts package provides a Command Line Interface (CLI) for managing Decentralized Identifiers (DIDs) using the `did:tdw` method. -### Development mode + +### Usage + +The general syntax for using the CLI is: ```bash -bun run test:watch +bun run cli [command] [options] ``` -## Run the tests and save a log +To output the help using the CLI: ```bash -bun run test:log +bun run cli help ``` + +### Commands + +1. **Create a DID** + + ```bash + bun run cli create [options] + ``` + + Options: + - `--domain [domain]`: (Required) Domain for the DID + - `--output [file]`: (Optional) Path to save the DID log + - `--portable`: (Optional) Make the DID portable + - `--prerotation`: (Optional) Enable pre-rotation + - `--witness [witness]`: (Optional) Add a witness (can be used multiple times) + - `--witness-threshold [n]`: (Optional) Set witness threshold + + Example: + ```bash + bun run cli create --domain example.com --portable --witness did:tdw:QmWitness1:example.com --witness did:tdw:QmWitness2...:example.com + ``` + +2. 
**Resolve a DID** + + ```bash + bun run cli resolve --did [did] + ``` + + Example: + ```bash + bun run cli resolve --did did:tdw:Qm...:example.com + ``` + +3. **Update a DID** + + ```bash + bun run cli update [options] + ``` + + Options: + - `--log [file]`: (Required) Path to the DID log file + - `--output [file]`: (Optional) Path to save the updated DID log + - `--prerotation`: (Optional) Enable pre-rotation + - `--witness [witness]`: (Optional) Add a witness (can be used multiple times) + - `--witness-threshold [n]`: (Optional) Set witness threshold + - `--service [service]`: (Optional) Add a service (format: type,endpoint) + - `--add-vm [type]`: (Optional) Add a verification method + - `--also-known-as [alias]`: (Optional) Add an alsoKnownAs alias + + Example: + ```bash + bun run cli update --log ./did.jsonl --output ./updated-did.jsonl --add-vm keyAgreement --service LinkedDomains,https://example.com + ``` + +4. **Deactivate a DID** + + ```bash + bun run cli deactivate [options] + ``` + + Options: + - `--log [file]`: (Required) Path to the DID log file + - `--output [file]`: (Optional) Path to save the deactivated DID log + + Example: + ```bash + bun run cli deactivate --log ./did.jsonl --output ./deactivated-did.jsonl + ``` + +### Additional Notes + +- The CLI automatically generates new authentication keys when creating or updating a DID. +- The `--portable` option in the create command allows the DID to be moved to a different domain later. +- The `--prerotation` option enables key pre-rotation, which helps prevent loss of control if an active private key is compromised. +- Witness functionality allows for third-party attestation of DID operations. +- The CLI saves the DID log to a file when the `--output` option is provided. +- For the update and deactivate commands, the existing DID log must be provided using the `--log` option. 
diff --git a/package.json b/package.json index 1aaecb2..db520bb 100644 --- a/package.json +++ b/package.json @@ -3,16 +3,17 @@ "module": "src/index.ts", "type": "module", "scripts": { + "dev": "bun --watch --inspect-wait ./src/resolver.ts", "server": "bun --watch ./src/resolver.ts", "test": "bun test", - "test:watch": "bun test --watch", - "test:bail": "bun test --watch --bail", - "test:log": "mkdir -p ./test/logs && LOG_RESOLVES=true bun test &> ./test/logs/test-run.txt" + "test:watch": "bun test --watch witness", + "test:bail": "bun test --watch --bail --verbose", + "test:log": "mkdir -p ./test/logs && LOG_RESOLVES=true bun test &> ./test/logs/test-run.txt", + "cli": "bun run src/cli.ts" }, "devDependencies": { - "bun-types": "latest", - "ts-node": "^10.9.2", - "tsx": "^4.7.1" + "bun-bagel": "^1.1.0", + "bun-types": "latest" }, "peerDependencies": { "typescript": "^5.0.0" diff --git a/src/assertions.ts b/src/assertions.ts index 4e27e1c..874e20e 100644 --- a/src/assertions.ts +++ b/src/assertions.ts @@ -1,22 +1,48 @@ import * as ed from '@noble/ed25519'; import { base58btc } from "multiformats/bases/base58"; -import { bytesToHex, createSCID, deriveHash } from "./utils"; +import { bytesToHex, createSCID, deriveHash, resolveVM } from "./utils"; import { canonicalize } from 'json-canonicalize'; import { createHash } from 'node:crypto'; -export const keyIsAuthorized = (key: string, updateKeys: string[]) => { +const isKeyAuthorized = (verificationMethod: string, updateKeys: string[]): boolean => { if (process.env.IGNORE_ASSERTION_KEY_IS_AUTHORIZED) return true; - return updateKeys.includes(key); -} -export const documentStateIsValid = async (doc: any, proofs: any[], updateKeys: string[]) => { + if (verificationMethod.startsWith('did:key:')) { + const key = verificationMethod.split('did:key:')[1].split('#')[0]; + return updateKeys.includes(key); + } + return false; +}; + +const isWitnessAuthorized = (verificationMethod: string, witnesses: string[]): boolean => { + if 
(process.env.IGNORE_WITNESS_IS_AUTHORIZED) return true; + + if (verificationMethod.startsWith('did:tdw:')) { + const didWithoutFragment = verificationMethod.split('#')[0]; + return witnesses.includes(didWithoutFragment); + } + return false; +}; + +export const documentStateIsValid = async (doc: any, proofs: any[], updateKeys: string[], witnesses: string[] = []) => { if (process.env.IGNORE_ASSERTION_DOCUMENT_STATE_IS_VALID) return true; + let i = 0; while(i < proofs.length) { const proof = proofs[i]; - if (!keyIsAuthorized(proof.verificationMethod.split('#')[0].split('did:key:').at(-1), updateKeys)) { - throw new Error(`key ${proof.verificationMethod} is not authorized to update.`) + + if (proof.verificationMethod.startsWith('did:key:')) { + if (!isKeyAuthorized(proof.verificationMethod, updateKeys)) { + throw new Error(`Key ${proof.verificationMethod} is not authorized to update.`); + } + } else if (proof.verificationMethod.startsWith('did:tdw:')) { + if (witnesses.length > 0 && !isWitnessAuthorized(proof.verificationMethod, witnesses)) { + throw new Error(`Key ${proof.verificationMethod} is not from an authorized witness.`); + } + } else { + throw new Error(`Unsupported verification method: ${proof.verificationMethod}`); } + if (proof.type !== 'DataIntegrityProof') { throw new Error(`Unknown proof type ${proof.type}`); } @@ -26,7 +52,11 @@ export const documentStateIsValid = async (doc: any, proofs: any[], updateKeys: if (proof.cryptosuite !== 'eddsa-jcs-2022') { throw new Error(`Unknown cryptosuite ${proof.cryptosuite}`); } - const publicKey = base58btc.decode(proof.verificationMethod.split('did:key:')[1].split('#')[0]); + const vm = await resolveVM(proof.verificationMethod); + if (!vm) { + throw new Error(`Verification Method ${proof.verificationMethod} not found`); + } + const publicKey = base58btc.decode(vm.publicKeyMultibase!); if (publicKey[0] !== 237 || publicKey[1] !== 1) { throw new Error(`multiKey doesn't include ed25519 header (0xed01)`) } @@ -35,7 
+65,6 @@ export const documentStateIsValid = async (doc: any, proofs: any[], updateKeys: const dataHash = createHash('sha256').update(canonicalize(doc)).digest(); const proofHash = createHash('sha256').update(canonicalize(restProof)).digest(); const input = Buffer.concat([dataHash, proofHash]); - const verified = await ed.verifyAsync( bytesToHex(sig), bytesToHex(input), diff --git a/src/cli.ts b/src/cli.ts new file mode 100644 index 0000000..d22ac0f --- /dev/null +++ b/src/cli.ts @@ -0,0 +1,265 @@ +import { createDID, resolveDID, updateDID, deactivateDID } from './method'; +import { createSigner, generateEd25519VerificationMethod } from './cryptography'; +import { getFileUrl, readLogFromDisk, writeLogToDisk, writeVerificationMethodToEnv } from './utils'; + +const usage = ` +Usage: bun run cli [command] [options] + +Commands: + create Create a new DID + resolve Resolve a DID + update Update an existing DID + deactivate Deactivate an existing DID + +Options: + --domain [domain] Domain for the DID (required for create) + --log [file] Path to the DID log file (required for resolve, update, deactivate) + --output [file] Path to save the updated DID log (optional for create, update, deactivate) + --portable Make the DID portable (optional for create) + --prerotation Enable pre-rotation (optional for create and update) + --witness [witness] Add a witness (can be used multiple times) + --witness-threshold [n] Set witness threshold (optional, defaults to number of witnesses) + --service [service] Add a service (format: type,endpoint) (can be used multiple times) + --add-vm [type] Add a verification method (type can be authentication, assertionMethod, keyAgreement, capabilityInvocation, capabilityDelegation) + --also-known-as [alias] Add an alsoKnownAs alias (can be used multiple times) + +Examples: + bun run cli create --domain example.com --portable --witness did:example:witness1 --witness did:example:witness2 + bun run cli resolve --did did:tdw:123456:example.com + bun 
run cli update --log ./did.jsonl --output ./updated-did.jsonl --add-vm keyAgreement --service LinkedDomains,https://example.com + bun run cli deactivate --log ./did.jsonl --output ./deactivated-did.jsonl +`; + +async function main() { + const args = Bun.argv.slice(2); + const command = args[0]; + + if (!command) { + console.log(usage); + process.exit(1); + } + + try { + switch (command) { + case 'create': + await handleCreate(args.slice(1)); + break; + case 'resolve': + await handleResolve(args.slice(1)); + break; + case 'update': + await handleUpdate(args.slice(1)); + break; + case 'deactivate': + await handleDeactivate(args.slice(1)); + break; + default: + console.log(`Unknown command: ${command}`); + console.log(usage); + process.exit(1); + } + } catch (error) { + console.error('An error occurred:', error); + process.exit(1); + } +} + +async function handleCreate(args: string[]) { + const options = parseOptions(args); + const domain = options['domain'] as string; + const output = options['output'] as string | undefined; + const portable = options['portable'] !== undefined; + const prerotation = options['prerotation'] !== undefined; + const witnesses = options['witness'] as string[] | undefined; + const witnessThreshold = options['witness-threshold'] ? parseInt(options['witness-threshold'] as string) : witnesses?.length ?? 
0; + + if (!domain) { + console.error('Domain is required for create command'); + process.exit(1); + } + + const authKey = await generateEd25519VerificationMethod('authentication'); + const { did, doc, meta, log } = await createDID({ + domain, + signer: createSigner(authKey), + updateKeys: [authKey.publicKeyMultibase!], + verificationMethods: [authKey], + portable, + prerotation, + witnesses, + witnessThreshold, + }); + + console.log('Created DID:', did); + // console.log('DID Document:', JSON.stringify(doc, null, 2)); + // console.log('Meta:', JSON.stringify(meta, null, 2)); + // console.log('DID Log:', JSON.stringify(log, null, 2)); + + if (output) { + writeLogToDisk(output, log); + console.log(`DID log written to ${output}`); + writeVerificationMethodToEnv({...authKey, controller: did, id: `${did}#${authKey.publicKeyMultibase?.slice(-8)}`}); + console.log(`DID verification method saved to env`); + } +} + +async function handleResolve(args: string[]) { + const options = parseOptions(args); + const didIdentifier = options['did'] as string; + + if (!didIdentifier) { + console.error('DID identifier is required for resolve command'); + process.exit(1); + } + + try { + const log = await fetchLogFromIdentifier(didIdentifier); + const { did, doc, meta } = await resolveDID(log); + + console.log('Resolved DID:', did); + console.log('DID Document:', JSON.stringify(doc, null, 2)); + console.log('Metadata:', meta); + } catch (error) { + console.error('Error resolving DID:', error); + process.exit(1); + } +} + +async function fetchLogFromIdentifier(identifier: string): Promise { + try { + const url = getFileUrl(identifier); + console.log(url, identifier) + const response = await fetch(url); + + if (!response.ok) { + throw new Error(`HTTP error! 
status: ${response.status}`); + } + + const text = await response.text(); + return text.trim().split('\n').map(line => JSON.parse(line)); + } catch (error) { + console.error('Error fetching DID log:', error); + throw error; + } +} + +async function handleUpdate(args: string[]) { + const options = parseOptions(args); + const logFile = options['log'] as string; + const output = options['output'] as string | undefined; + const prerotation = options['prerotation'] !== undefined; + const witnesses = options['witness'] as string[] | undefined; + const witnessThreshold = options['witness-threshold'] ? parseInt(options['witness-threshold'] as string) : undefined; + const services = options['service'] ? parseServices(options['service'] as string[]) : undefined; + const addVm = options['add-vm'] as VerificationMethodType[] | undefined; + const alsoKnownAs = options['also-known-as'] as string[] | undefined; + + if (!logFile) { + console.error('Log file is required for update command'); + process.exit(1); + } + + const log = readLogFromDisk(logFile); + const authKey = await generateEd25519VerificationMethod('authentication'); + + const verificationMethods: VerificationMethod[] = [ + authKey, + ...(addVm?.map(type => ({ + type, + publicKeyMultibase: authKey.publicKeyMultibase, + } as VerificationMethod)) || []) + ]; + + const { did, doc, meta, log: updatedLog } = await updateDID({ + log, + signer: createSigner(authKey), + updateKeys: [authKey.publicKeyMultibase!], + verificationMethods, + prerotation, + witnesses, + witnessThreshold, + services, + alsoKnownAs, + }); + + console.log('Updated DID:', did); + console.log('Updated DID Document:', JSON.stringify(doc, null, 2)); + console.log('Updated Metadata:', meta); + + if (output) { + writeLogToDisk(output, updatedLog); + console.log(`Updated DID log written to ${output}`); + } +} + +async function handleDeactivate(args: string[]) { + const options = parseOptions(args); + const logFile = options['log'] as string; + const output = 
options['output'] as string | undefined; + + if (!logFile) { + console.error('Log file is required for deactivate command'); + process.exit(1); + } + + const log = readLogFromDisk(logFile); + const authKey = await generateEd25519VerificationMethod('authentication'); + const { did, doc, meta, log: deactivatedLog } = await deactivateDID({ + log, + signer: createSigner(authKey), + }); + + console.log('Deactivated DID:', did); + console.log('Deactivated DID Document:', JSON.stringify(doc, null, 2)); + console.log('Deactivated Metadata:', meta); + + if (output) { + writeLogToDisk(output, deactivatedLog); + console.log(`Deactivated DID log written to ${output}`); + } +} + +type VerificationMethodType = 'authentication' | 'assertionMethod' | 'keyAgreement' | 'capabilityInvocation' | 'capabilityDelegation'; + +function parseOptions(args: string[]): Record { + const options: Record = {}; + for (let i = 0; i < args.length; i++) { + if (args[i].startsWith('--')) { + const key = args[i].slice(2); + if (i + 1 < args.length && !args[i + 1].startsWith('--')) { + if (key === 'witness' || key === 'service' || key === 'also-known-as') { + options[key] = options[key] || []; + (options[key] as string[]).push(args[++i]); + } else if (key === 'add-vm') { + options[key] = options[key] || []; + const value = args[++i]; + if (isValidVerificationMethodType(value)) { + (options[key] as VerificationMethodType[]).push(value); + } else { + console.error(`Invalid verification method type: ${value}`); + process.exit(1); + } + } else { + options[key] = args[++i]; + } + } else { + options[key] = ''; + } + } + } + return options; +} + +// Add this function to validate VerificationMethodType +function isValidVerificationMethodType(type: string): type is VerificationMethodType { + return ['authentication', 'assertionMethod', 'keyAgreement', 'capabilityInvocation', 'capabilityDelegation'].includes(type); +} + +function parseServices(services: string[]): ServiceEndpoint[] { + return services.map(service 
=> { + const [type, serviceEndpoint] = service.split(','); + return { type, serviceEndpoint }; + }); +} + +main(); \ No newline at end of file diff --git a/src/cryptography.ts b/src/cryptography.ts index a40fb45..436f3aa 100644 --- a/src/cryptography.ts +++ b/src/cryptography.ts @@ -6,13 +6,13 @@ import { base58btc } from "multiformats/bases/base58" import { canonicalize } from 'json-canonicalize'; import { createHash } from 'node:crypto'; -export const createSigner = (vm: VerificationMethod) => { +export const createSigner = (vm: VerificationMethod, useStatic: boolean = true) => { return async (doc: any, challenge: string) => { try { const proof: any = { type: 'DataIntegrityProof', cryptosuite: 'eddsa-jcs-2022', - verificationMethod: `did:key:${vm.publicKeyMultibase}`, + verificationMethod: useStatic ? `did:key:${vm.publicKeyMultibase}` : vm.id, created: createDate(), proofPurpose: 'authentication', challenge diff --git a/src/global.d.ts b/src/global.d.ts index 8c54cb6..446ebaf 100644 --- a/src/global.d.ts +++ b/src/global.d.ts @@ -1,2 +1,2 @@ declare module 'fast-json-patch/index.mjs'; -declare module '@interop/base58-universal'; \ No newline at end of file +declare module '@interop/base58-universal'; diff --git a/src/interfaces.d.ts b/src/interfaces.d.ts index c9acc5b..e06c801 100644 --- a/src/interfaces.d.ts +++ b/src/interfaces.d.ts @@ -1,3 +1,18 @@ +interface DIDResolutionMeta { + versionId: string; + created: string; + updated: string; + previousLogEntryHash?: string; + updateKeys: string[]; + scid: string; + prerotation: boolean; + portable: boolean; + nextKeyHashes: string[]; + deactivated: boolean; + witnesses: string[], + witnessThreshold: number; +} + interface DIDDoc { id?: string; controller?: string | string[]; @@ -17,6 +32,17 @@ interface DIDOperation { value: any; } +interface DataIntegrityProof { + id?: string; + type: string; + cryptosuite: string; + verificationMethod: string; + created: string; + proofValue: string; + proofPurpose: string; + 
challenge?: string; +} + type DIDLogEntry = [ versionId: string, timestamp: string, @@ -26,11 +52,15 @@ type DIDLogEntry = [ updateKeys?: string[], prerotation?: boolean, nextKeyHashes?: string[], - portable?: boolean + portable?: boolean, + witnesses?: string[], + witnessThreshold?: number, + deactivated?: boolean }, data: {value: any} | {patch: DIDOperation[]}, - proof?: any + proof?: DataIntegrityProof[] ]; + type DIDLog = DIDLogEntry[]; interface ServiceEndpoint { @@ -60,6 +90,8 @@ interface CreateDIDInterface { prerotation?: boolean; nextKeyHashes?: string[]; portable?: boolean; + witnesses?: string[]; + witnessThreshold?: number; } interface SignDIDDocInterface { @@ -82,6 +114,8 @@ interface UpdateDIDInterface { deactivated?: boolean; prerotation?: boolean; nextKeyHashes?: string[]; + witnesses?: string[]; + witnessThreshold?: number; } interface DeactivateDIDInterface { diff --git a/src/method.ts b/src/method.ts index f18e480..e2afdc5 100644 --- a/src/method.ts +++ b/src/method.ts @@ -1,10 +1,10 @@ import * as jsonpatch from 'fast-json-patch/index.mjs'; -import { clone, createDate, createDIDDoc, createSCID, deriveHash, normalizeVMs } from "./utils"; +import { clone, collectWitnessProofs, createDate, createDIDDoc, createSCID, deriveHash, findVerificationMethod, normalizeVMs } from "./utils"; import { BASE_CONTEXT, METHOD, PLACEHOLDER, PROTOCOL } from './constants'; import { documentStateIsValid, hashChainValid, newKeysAreValid, scidIsFromHash } from './assertions'; -export const createDID = async (options: CreateDIDInterface): Promise<{did: string, doc: any, meta: any, log: DIDLog}> => { +export const createDID = async (options: CreateDIDInterface): Promise<{did: string, doc: any, meta: DIDResolutionMeta, log: DIDLog}> => { if (!options.updateKeys) { throw new Error('Update keys not supplied') } @@ -12,28 +12,48 @@ export const createDID = async (options: CreateDIDInterface): Promise<{did: stri const controller = 
`did:${METHOD}:${PLACEHOLDER}:${options.domain}`; const createdDate = createDate(options.created); let {doc} = await createDIDDoc({...options, controller}); + const params = { + scid: PLACEHOLDER, + updateKeys: options.updateKeys, + portable: options.portable ?? false, + ...(options.prerotation ? {prerotation: true, nextKeyHashes: options.nextKeyHashes ?? []} : {prerotation: false, nextKeyHashes: []}), + ...(options.witnesses ? { + witnesses: options.witnesses, + witnessThreshold: options.witnessThreshold || options.witnesses.length + } : { + witnesses: [], + witnessThreshold: 0 + }), + deactivated: false + }; const initialLogEntry: DIDLogEntry = [ PLACEHOLDER, createdDate, { method: PROTOCOL, - scid: PLACEHOLDER, - updateKeys: options.updateKeys, - portable: options.portable ?? false, - ...(options.prerotation ? {prerotation: true, nextKeyHashes: options.nextKeyHashes} : {}) + ...params }, {value: doc} ] const initialLogEntryHash = deriveHash(initialLogEntry); - const scid = await createSCID(initialLogEntryHash); - doc = JSON.parse(JSON.stringify(doc).replaceAll(PLACEHOLDER, scid)); + params.scid = await createSCID(initialLogEntryHash); + doc = JSON.parse(JSON.stringify(doc).replaceAll(PLACEHOLDER, params.scid)); initialLogEntry[0] = `1-${initialLogEntryHash}`; - initialLogEntry[2] = JSON.parse(JSON.stringify(initialLogEntry[2]).replaceAll(PLACEHOLDER, scid)); + initialLogEntry[2] = JSON.parse(JSON.stringify(initialLogEntry[2]).replaceAll(PLACEHOLDER, params.scid)); initialLogEntry[3] = { value: doc } const signedDoc = await options.signer(doc, initialLogEntry[0]); - initialLogEntry.push([signedDoc.proof]); + let allProofs = [signedDoc.proof]; + initialLogEntry.push(allProofs); + + if (options.witnesses && options.witnesses.length > 0) { + const witnessProofs = await collectWitnessProofs(options.witnesses, [initialLogEntry]); + if (witnessProofs.length > 0) { + allProofs = [...allProofs, ...witnessProofs]; + initialLogEntry[4] = allProofs; + } + } return { did: 
doc.id!, doc, @@ -41,7 +61,7 @@ export const createDID = async (options: CreateDIDInterface): Promise<{did: stri versionId: initialLogEntry[0], created: initialLogEntry[1], updated: initialLogEntry[1], - ...(options.prerotation ? {prerotation: true, nextKeyHashes: options.nextKeyHashes} : {}) + ...params }, log: [ initialLogEntry @@ -49,65 +69,81 @@ export const createDID = async (options: CreateDIDInterface): Promise<{did: stri } } -export const resolveDID = async (log: DIDLog, options: {versionNumber?: number, versionId?: string, versionTime?: Date} = {}): Promise<{did: string, doc: any, meta: any}> => { +export const resolveDID = async (log: DIDLog, options: { + versionNumber?: number, + versionId?: string, + versionTime?: Date, + verificationMethod?: string +} = {}): Promise<{did: string, doc: any, meta: DIDResolutionMeta}> => { + if (options.verificationMethod && (options.versionNumber || options.versionId)) { + throw new Error("Cannot specify both verificationMethod and version number/id"); + } const resolutionLog = clone(log); const protocol = resolutionLog[0][2].method; if(protocol !== PROTOCOL) { throw new Error(`'${protocol}' protocol unknown.`); } - let versionId = ''; let doc: any = {}; let did = ''; - let scid = ''; - let created = ''; - let updated = ''; + let meta: DIDResolutionMeta = { + versionId: '', + created: '', + updated: '', + previousLogEntryHash: '', + scid: '', + prerotation: false, + portable: false, + nextKeyHashes: [], + deactivated: false, + updateKeys: [], + witnesses: [], + witnessThreshold: 0 + }; let host = ''; - let updateKeys = []; - let portable = false; - let previousLogEntryHash = ''; let i = 0; - let deactivated: boolean | null = null; - let prerotation = false; let nextKeyHashes: string[] = []; + for (const entry of resolutionLog) { - const [currentVersionId, timestamp, params, data, ...rest] = entry; + const [currentVersionId, timestamp, params, data, proof] = entry; const [version, entryHash] = currentVersionId.split('-'); 
if (parseInt(version) !== i + 1) { throw new Error(`version '${version}' in log doesn't match expected '${i + 1}'.`); } - versionId = currentVersionId; + meta.versionId = currentVersionId; if (timestamp) { // TODO check timestamps make sense } - updated = timestamp; + meta.updated = timestamp; // doc patches & proof let newDoc; if (version === '1') { - created = timestamp; + meta.created = timestamp; newDoc = data.value; host = newDoc.id.split(':').at(-1); - scid = params.scid; - portable = params.portable ?? portable; - updateKeys = params.updateKeys; - prerotation = params.prerotation === true; + meta.scid = params.scid; + meta.portable = params.portable ?? meta.portable; + meta.updateKeys = params.updateKeys; + meta.prerotation = params.prerotation === true; + meta.witnesses = params.witnesses || meta.witnesses; + meta.witnessThreshold = params.witnessThreshold || meta.witnessThreshold || meta.witnesses.length; nextKeyHashes = params.nextKeyHashes ?? []; - newKeysAreValid(updateKeys, [], nextKeyHashes, false, prerotation === true); + newKeysAreValid(meta.updateKeys, [], nextKeyHashes, false, meta.prerotation === true); const logEntryHash = deriveHash( [ PLACEHOLDER, - created, - JSON.parse(JSON.stringify(params).replaceAll(scid, PLACEHOLDER)), - {value: JSON.parse(JSON.stringify(newDoc).replaceAll(scid, PLACEHOLDER))} + meta.created, + JSON.parse(JSON.stringify(params).replaceAll(meta.scid, PLACEHOLDER)), + {value: JSON.parse(JSON.stringify(newDoc).replaceAll(meta.scid, PLACEHOLDER))} ] ); - previousLogEntryHash = logEntryHash; - if (!await scidIsFromHash(scid, logEntryHash)) { - throw new Error(`SCID '${scid}' not derived from logEntryHash '${logEntryHash}'`); + meta.previousLogEntryHash = logEntryHash; + if (!await scidIsFromHash(meta.scid, logEntryHash)) { + throw new Error(`SCID '${meta.scid}' not derived from logEntryHash '${logEntryHash}'`); } - const verified = await documentStateIsValid(newDoc, rest[0], updateKeys); + const verified = await 
documentStateIsValid(newDoc, proof, meta.updateKeys, meta.witnesses); if (!verified) { - throw new Error(`version ${versionId} failed verification of the proof.`) + throw new Error(`version ${meta.versionId} failed verification of the proof.`) } } else { // version number > 1 @@ -120,70 +156,66 @@ export const resolveDID = async (log: DIDLog, options: {versionNumber?: number, throw new Error("prerotation enabled without nextKeyHashes"); } const newHost = newDoc.id.split(':').at(-1); - if (!portable && newHost !== host) { + if (!meta.portable && newHost !== host) { throw new Error("Cannot move DID: portability is disabled"); } else if (newHost !== host) { host = newHost; } - newKeysAreValid(params.updateKeys ?? [], nextKeyHashes, params.nextKeyHashes ?? [], prerotation, params.prerotation === true); + newKeysAreValid(params.updateKeys ?? [], nextKeyHashes, params.nextKeyHashes ?? [], meta.prerotation, params.prerotation === true); if (!hashChainValid(`${i+1}-${entryHash}`, entry[0])) { - throw new Error(`Hash chain broken at '${versionId}'`); + throw new Error(`Hash chain broken at '${meta.versionId}'`); } - const verified = await documentStateIsValid(newDoc, rest[0], updateKeys); + const verified = await documentStateIsValid(newDoc, proof, meta.updateKeys, meta.witnesses); if (!verified) { - throw new Error(`version ${versionId} failed verification of the proof.`) + throw new Error(`version ${meta.versionId} failed verification of the proof.`) } if (params.updateKeys) { - updateKeys = params.updateKeys; + meta.updateKeys = params.updateKeys; } if (params.deactivated === true) { - deactivated = true; + meta.deactivated = true; } if (params.prerotation === true) { - prerotation = true; + meta.prerotation = true; } if (params.nextKeyHashes) { nextKeyHashes = params.nextKeyHashes; } + if (params.witnesses) { + meta.witnesses = params.witnesses; + meta.witnessThreshold = params.witnessThreshold || params.witnesses.length; + } } doc = clone(newDoc); did = doc.id; - if 
(options.versionNumber === version || options.versionId === versionId) { - return {did, doc, meta: {versionId, created, updated, previousLogEntryHash, scid}} + + // Check for matching verification method + if (options.verificationMethod && findVerificationMethod(doc, options.verificationMethod)) { + return {did, doc, meta}; } - if (options.versionTime && options.versionTime > new Date(updated)) { + + if (options.versionNumber === parseInt(version) || options.versionId === meta.versionId) { + return {did, doc, meta}; + } + if (options.versionTime && options.versionTime > new Date(meta.updated)) { if (resolutionLog[i+1] && options.versionTime < new Date(resolutionLog[i+1][1])) { - return {did, doc, meta: {versionId, created, updated, previousLogEntryHash, scid}} + return {did, doc, meta}; } else if(!resolutionLog[i+1]) { - return {did, doc, meta: {versionId, created, updated, previousLogEntryHash, scid}} + return {did, doc, meta}; } } i++; } - if (options.versionTime || options.versionId) { + if (options.versionTime || options.versionId || options.verificationMethod) { throw new Error(`DID with options ${JSON.stringify(options)} not found`); } - return { - did, - doc, - meta: { - versionId, - created, - updated, - previousLogEntryHash, - scid, - prerotation, - portable, - nextKeyHashes, - ...(deactivated ? {deactivated}: {}) - } - } + return {did, doc, meta}; } -export const updateDID = async (options: UpdateDIDInterface): Promise<{did: string, doc: any, meta: any, log: DIDLog}> => { +export const updateDID = async (options: UpdateDIDInterface): Promise<{did: string, doc: any, meta: DIDResolutionMeta, log: DIDLog}> => { const { log, updateKeys, context, verificationMethods, services, alsoKnownAs, - controller, domain, nextKeyHashes, prerotation + controller, domain, nextKeyHashes, prerotation, witnesses, witnessThreshold } = options; let {did, doc, meta} = await resolveDID(log); newKeysAreValid(updateKeys ?? [], meta.nextKeyHashes ?? [], nextKeyHashes ?? 
[], meta.prerotation === true, prerotation === true); @@ -203,6 +235,14 @@ export const updateDID = async (options: UpdateDIDInterface): Promise<{did: stri ...(services ? {service: services} : {}), ...(alsoKnownAs ? {alsoKnownAs} : {}) } + const params = { + ...(updateKeys ? {updateKeys} : {}), + ...(prerotation ? {prerotation: true, nextKeyHashes} : {}), + ...(witnesses || meta.witnesses ? { + witnesses: witnesses || meta.witnesses, + witnessThreshold: witnesses ? witnessThreshold || witnesses.length : meta.witnessThreshold + } : {}) + }; const [currentVersion] = meta.versionId.split('-'); const nextVersion = parseInt(currentVersion) + 1; meta.updated = createDate(options.updated); @@ -210,25 +250,30 @@ export const updateDID = async (options: UpdateDIDInterface): Promise<{did: stri const logEntry = [ meta.versionId, meta.updated, - { - ...(updateKeys ? {updateKeys} : {}), - ...(prerotation ? {prerotation: true, nextKeyHashes} : {}) - }, - {patch: clone(patch)} + params, + {patch: clone(patch)}, + [] as DataIntegrityProof[] ]; const logEntryHash = deriveHash(logEntry); logEntry[0] = `${nextVersion}-${logEntryHash}`; const signedDoc = await options.signer(newDoc, logEntry[0]); - logEntry.push([signedDoc.proof]) + logEntry[4] = [signedDoc.proof]; + if (meta.witnesses && meta.witnesses.length > 0) { + const witnessProofs = await collectWitnessProofs(meta.witnesses, [...log, logEntry] as DIDLog); + if (witnessProofs.length > 0) { + logEntry[4] = [...logEntry[4], ...witnessProofs]; + } + } return { did, doc: newDoc, meta: { + ...meta, versionId: logEntry[0], created: meta.created, updated: meta.updated, previousLogEntryHash: meta.previousLogEntryHash, - ...(prerotation ? 
{prerotation: true, nextKeyHashes} : {}) + ...params }, log: [ ...clone(log), @@ -237,7 +282,7 @@ export const updateDID = async (options: UpdateDIDInterface): Promise<{did: stri }; } -export const deactivateDID = async (options: DeactivateDIDInterface): Promise<{did: string, doc: any, meta: any, log: DIDLog}> => { +export const deactivateDID = async (options: DeactivateDIDInterface): Promise<{did: string, doc: any, meta: DIDResolutionMeta, log: DIDLog}> => { const {log} = options; let {did, doc, meta} = await resolveDID(log); const newDoc = { @@ -253,7 +298,7 @@ export const deactivateDID = async (options: DeactivateDIDInterface): Promise<{d const nextVersion = parseInt(currentVersion) + 1; meta.updated = createDate(meta.created); const patch = jsonpatch.compare(doc, newDoc); - const logEntry = [ + const logEntry: DIDLogEntry = [ meta.versionId, meta.updated, {deactivated: true}, @@ -267,6 +312,7 @@ export const deactivateDID = async (options: DeactivateDIDInterface): Promise<{d did, doc: newDoc, meta: { + ...meta, versionId: logEntry[0], created: meta.created, updated: meta.updated, diff --git a/src/resolver.ts b/src/resolver.ts index f023720..bdba151 100644 --- a/src/resolver.ts +++ b/src/resolver.ts @@ -1,18 +1,29 @@ import { Elysia } from 'elysia' -import { getLatestDIDDoc, getLogFile } from './routes/did'; +import { getLatestDIDDoc, getLogFileForBase, getLogFileForSCID } from './routes/did'; +import { createWitnessProof } from './witness'; const app = new Elysia() + .get('/health', 'ok') + .get('/.well-known/did.jsonl', () => getLogFileForBase()) + .post('/witness', async ({body}) => { + const result = await createWitnessProof((body as any).log); + console.log(`Signed with VM`, (result as any).proof.verificationMethod) + if ('error' in result) { + return { error: result.error }; + } + return { proof: result.proof }; + }) .group('/:id', app => { return app - .get('/did.jsonl', ({params}) => getLogFile({params: {scid: params.id}})) - .get('/', ({params, set}) 
=> getLatestDIDDoc({params, set})) + .get('/did.jsonl', ({params}) => getLogFileForSCID({params: {scid: params.id}})) .get('/:version', ({params: {id, version}}) => { console.log(version) }) .get('/versions', ({params: {id}}) => { console.log('versions') }) - }) + .get('/', ({params}) => getLatestDIDDoc({params})) + }) .listen(8000) diff --git a/src/routes/.well-known/did.jsonl b/src/routes/.well-known/did.jsonl new file mode 100644 index 0000000..c5f59af --- /dev/null +++ b/src/routes/.well-known/did.jsonl @@ -0,0 +1 @@ +["1-QmWUentPDpw1fYJW61yet2rNG74APgdVz61SpxnsiWveqK","2024-10-03T07:45:20Z",{"method":"did:tdw:0.3","scid":"QmWUentPDpw1fYJW61yet2rNG74APgdVz61SpxnsiWveqK","updateKeys":["z6MkjxzETWiQ89KaWk8ToVzLwghqKSjAd3JFcudpbkTooi6G"],"portable":false,"prerotation":false,"nextKeyHashes":[],"witnesses":[],"witnessThreshold":0,"deactivated":false},{"value":{"@context":["https://www.w3.org/ns/did/v1","https://w3id.org/security/multikey/v1"],"id":"did:tdw:QmWUentPDpw1fYJW61yet2rNG74APgdVz61SpxnsiWveqK:localhost%3A8000","controller":"did:tdw:QmWUentPDpw1fYJW61yet2rNG74APgdVz61SpxnsiWveqK:localhost%3A8000","authentication":["did:tdw:QmWUentPDpw1fYJW61yet2rNG74APgdVz61SpxnsiWveqK:localhost%3A8000#bkTooi6G"],"verificationMethod":[{"id":"did:tdw:QmWUentPDpw1fYJW61yet2rNG74APgdVz61SpxnsiWveqK:localhost%3A8000#bkTooi6G","controller":"did:tdw:QmWUentPDpw1fYJW61yet2rNG74APgdVz61SpxnsiWveqK:localhost%3A8000","type":"Multikey","publicKeyMultibase":"z6MkjxzETWiQ89KaWk8ToVzLwghqKSjAd3JFcudpbkTooi6G"}]}},[{"type":"DataIntegrityProof","cryptosuite":"eddsa-jcs-2022","verificationMethod":"did:key:z6MkjxzETWiQ89KaWk8ToVzLwghqKSjAd3JFcudpbkTooi6G","created":"2024-10-03T07:45:20Z","proofPurpose":"authentication","challenge":"1-QmWUentPDpw1fYJW61yet2rNG74APgdVz61SpxnsiWveqK","proofValue":"z5eAQUmsvCnSytgsydGNUAhPv9troenHdPpR5WDt4vNWRXPQDt9wQdVtj7njiRyvUPndeYC6RExZcUubxDvqcLG7i"}]] diff --git a/src/routes/did.ts b/src/routes/did.ts index 168e88e..76fc5c5 100644 --- a/src/routes/did.ts 
+++ b/src/routes/did.ts @@ -1,7 +1,7 @@ import { resolveDID } from '../method'; import { getFileUrl } from '../utils'; -export const getLatestDIDDoc = async ({params: {id}, set}: {params: {id: string;}; set: any;}) => { +export const getLatestDIDDoc = async ({params: {id}}: {params: {id: string;};}) => { try { const url = getFileUrl(id); const didLog = await (await fetch(url)).text(); @@ -14,6 +14,10 @@ export const getLatestDIDDoc = async ({params: {id}, set}: {params: {id: string; } } -export const getLogFile = ({params: {scid}}: {params: {scid: string}}) => { - return Bun.file(`./test/logs/${scid}/did.jsonl`); +export const getLogFileForSCID = async ({params: {scid}}: {params: {scid: string}}) => { + return await Bun.file(`./src/routes/${scid}/did.jsonl`).text(); +} + +export const getLogFileForBase = async () => { + return await Bun.file(`./src/routes/.well-known/did.jsonl`).text(); } \ No newline at end of file diff --git a/src/utils.ts b/src/utils.ts index 15fadb2..8b009c6 100644 --- a/src/utils.ts +++ b/src/utils.ts @@ -1,30 +1,80 @@ +import fs from 'node:fs'; import * as base58btc from '@interop/base58-universal' import { canonicalize } from 'json-canonicalize'; import { nanoid } from 'nanoid'; -import { createHash } from 'node:crypto'; import { sha256 } from 'multiformats/hashes/sha2' +import { resolveDID } from './method'; +import { join } from 'path'; -export const clone = (input: any) => JSON.parse(JSON.stringify(input)); +export const readLogFromDisk = (path: string): DIDLog => { + return fs.readFileSync(path, 'utf8').trim().split('\n').map(l => JSON.parse(l)); +} -export const getFileUrl = (id: string) => { - if (!id.startsWith('did:tdw:')) { - throw new Error(`${id} is not a valid did:tdw identifier`); +export const writeLogToDisk = (path: string, log: DIDLog) => { + fs.writeFileSync(path, JSON.stringify(log.shift()) + '\n'); + for (const entry of log) { + fs.appendFileSync(path, JSON.stringify(entry) + '\n'); } +} + +export const 
writeVerificationMethodToEnv = (verificationMethod: VerificationMethod) => { + const envFilePath = join(process.cwd(), '.env'); + const vmData = { + id: verificationMethod.id, + type: verificationMethod.type, + controller: verificationMethod.controller || '', + publicKeyMultibase: verificationMethod.publicKeyMultibase, + secretKeyMultibase: verificationMethod.secretKeyMultibase || '' + }; + + try { + let existingData: any[] = []; + if (fs.existsSync(envFilePath)) { + const envContent = fs.readFileSync(envFilePath, 'utf8'); + const match = envContent.match(/DID_VERIFICATION_METHODS=(.*)/); + if (match && match[1]) { + const decodedData = Buffer.from(match[1], 'base64').toString('utf8'); + existingData = JSON.parse(decodedData); + } + } + + existingData.push(vmData); + + const jsonData = JSON.stringify(existingData); + const encodedData = Buffer.from(jsonData).toString('base64'); + + const envContent = `DID_VERIFICATION_METHODS=${encodedData}\n`; + + fs.writeFileSync(envFilePath, envContent); + console.log('Verification method written to .env file successfully.'); + } catch (error) { + console.error('Error writing verification method to .env file:', error); + } +}; + +export const clone = (input: any) => JSON.parse(JSON.stringify(input)); + +export const getBaseUrl = (id: string) => { const parts = id.split(':'); - if (parts.length < 4) { + if (!id.startsWith('did:tdw:') || parts.length < 4) { throw new Error(`${id} is not a valid did:tdw identifier`); } - const scid = parts[2]; - const domain = parts.slice(3).join(':'); - + let domain = parts.slice(3).join('/'); + domain = domain.replace(/%2F/g, '/'); + domain = domain.replace(/%3A/g, ':'); const protocol = domain.includes('localhost') ? 
'http' : 'https'; - - if (domain.includes('/')) { - return `${protocol}://${domain}/did.jsonl`; + return `${protocol}://${domain}`; +} + +export const getFileUrl = (id: string) => { + const baseUrl = getBaseUrl(id); + const url = new URL(baseUrl); + if (url.pathname !== '/') { + return `${baseUrl}/did.jsonl`; } - return `${protocol}://${domain}/.well-known/did.jsonl`; + return `${baseUrl}/.well-known/did.jsonl`; } export const createDate = (created?: Date | string) => new Date(created ?? Date.now()).toISOString().slice(0,-5)+'Z'; @@ -102,4 +152,92 @@ export const normalizeVMs = (verificationMethod: VerificationMethod[] | undefine })) } return {all}; +} + +export const collectWitnessProofs = async (witnesses: string[], log: DIDLog): Promise<DataIntegrityProof[]> => { + const proofs: DataIntegrityProof[] = []; + + const timeout = (ms: number) => new Promise((_, reject) => + setTimeout(() => reject(new Error('Request timed out')), ms) + ); + + const collectProof = async (witness: string): Promise<void> => { + const parts = witness.split(':'); + if (parts.length < 4) { + throw new Error(`${witness} is not a valid did:tdw identifier`); + } + + const witnessUrl = getBaseUrl(witness) + '/witness'; + try { + const response: any = await Promise.race([ + fetch(witnessUrl, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ log }), + }), + timeout(10000) // 10 second timeout + ]); + + if (response.ok) { + const data = await response.json(); + if (data.proof) { + proofs.push(data.proof); + } else { + console.warn(`Witness ${witnessUrl} did not provide a valid proof`); + } + } else { + console.warn(`Witness ${witnessUrl} responded with status: ${response.status}`); + } + } catch (error: any) { + if (error.message === 'Request timed out') { + console.error(`Request to witness ${witnessUrl} timed out`); + } else { + console.error(`Error collecting proof from witness ${witnessUrl}:`, error); + } + } + }; + + // Collect proofs from all witnesses concurrently + 
await Promise.all(witnesses.map(collectProof)); + + return proofs; +}; + +export const resolveVM = async (vm: string) => { + try { + if (vm.startsWith('did:key:')) { + return {publicKeyMultibase: vm.split('did:key:')[1].split('#')[0]} + } + else if (vm.startsWith('did:tdw:')) { + const url = getFileUrl(vm.split('#')[0]); + const didLog = await (await fetch(url)).text(); + const logEntries: DIDLog = didLog.trim().split('\n').map(l => JSON.parse(l)); + const {doc} = await resolveDID(logEntries, {verificationMethod: vm}); + return findVerificationMethod(doc, vm); + } + throw new Error(`Verification method ${vm} not found`); + } catch (e) { + throw new Error(`Error resolving VM ${vm}`) + } +} + +export const findVerificationMethod = (doc: any, vmId: string): VerificationMethod | null => { + // Check in the verificationMethod array + if (doc.verificationMethod && doc.verificationMethod.some((vm: any) => vm.id === vmId)) { + return doc.verificationMethod.find((vm: any) => vm.id === vmId); + } + + // Check in other verification method relationship arrays + const vmRelationships = ['authentication', 'assertionMethod', 'keyAgreement', 'capabilityInvocation', 'capabilityDelegation']; + for (const relationship of vmRelationships) { + if (doc[relationship]) { + if (doc[relationship].some((item: any) => item.id === vmId)) { + return doc[relationship].find((item: any) => item.id === vmId); + } + } + } + + return null; } \ No newline at end of file diff --git a/src/witness.ts b/src/witness.ts new file mode 100644 index 0000000..15d222b --- /dev/null +++ b/src/witness.ts @@ -0,0 +1,44 @@ +import { createSigner } from './cryptography'; +import { resolveDID } from './method'; + +// Parse the DID_VERIFICATION_METHODS environment variable +const verificationMethods = JSON.parse(Buffer.from(process.env.DID_VERIFICATION_METHODS || 'W10=', 'base64').toString('utf8')); +export async function createWitnessProof(log: DIDLog): Promise<{ proof: any } | { error: string }> { + if 
(!Array.isArray(log) || log.length < 1) { + return { error: 'Invalid log format' }; + } + + try { + const { did, doc, meta } = await resolveDID(log); + + // Find the corresponding verification method with secret key + const fullVM = verificationMethods.find((vm: any) => meta.witnesses.includes(vm.id.split('#')[0])); + if (!fullVM || !fullVM.secretKeyMultibase) { + return { error: 'Witness secret key not found' }; + } + + const logEntry = log[log.length - 1]; + const [versionId, timestamp, params, data] = logEntry; + + // Create a signer using the witness verification method + const signer = createSigner({ + type: 'authentication', + id: fullVM.id, + controller: fullVM.controller ?? fullVM.id.split('#')[0], + publicKeyMultibase: fullVM.publicKeyMultibase, + secretKeyMultibase: fullVM.secretKeyMultibase + }, false); + // Sign the log entry + const signedDoc = await signer( + (data as any).value, + versionId + ); + + return { + proof: signedDoc.proof + }; + } catch (error) { + console.error('Error in witness signing:', error); + return { error: 'Failed to create witness proof' }; + } +} \ No newline at end of file diff --git a/test/features.test.ts b/test/features.test.ts index c6eb323..97aa61a 100644 --- a/test/features.test.ts +++ b/test/features.test.ts @@ -1,10 +1,10 @@ import * as jsonpatch from 'fast-json-patch/index.mjs'; -import { beforeAll, expect, mock, test} from "bun:test"; +import { beforeAll, expect, test} from "bun:test"; import { createDID, resolveDID, updateDID } from "../src/method"; +import { mock } from "bun-bagel"; import { createSigner, generateEd25519VerificationMethod } from "../src/cryptography"; import { deriveHash, createDate, clone } from "../src/utils"; -import { newKeysAreValid } from '../src/assertions'; -import { createMockDIDLog } from './utils'; +import { createMockDIDLog} from './utils'; let log: DIDLog; let authKey1: VerificationMethod, @@ -389,4 +389,84 @@ test("updateDID should not allow moving a non-portable DID", async () => { 
expect(err).toBeDefined(); expect(err.message).toContain('Cannot move DID: portability is disabled'); -}); \ No newline at end of file +}); + +test("Create DID with witnesses", async () => { + mock("https://example.com/1234/witness", { method: "POST", response: { data: {proof: { + type: "DataIntegrityProof", + cryptosuite: "eddsa-jcs-2022", + verificationMethod: "did:tdw:1234:example.com:1234#key1", + created: "2023-06-18T21:19:10Z", + proofValue: "z58xkL6dbDRJjFVkBxhNHXNHFnZzZk...", + proofPurpose: "authentication" + } } }}); + mock("https://example.com/5678/witness", { method: "POST", response: { data: {proof: { + type: "DataIntegrityProof", + cryptosuite: "eddsa-jcs-2022", + verificationMethod: "did:tdw:5678:example.com:5678#key1", + created: "2023-06-18T21:19:10Z", + proofValue: "z58xkL6dbDRJjFVkBxhNHXNHFnZzZk...", + proofPurpose: "authentication" + } } }}); + const authKey = await generateEd25519VerificationMethod('authentication'); + const { did, doc, meta, log } = await createDID({ + domain: 'example.com', + signer: createSigner(authKey), + updateKeys: [authKey.publicKeyMultibase!], + verificationMethods: [authKey], + witnesses: ['did:tdw:1234:example.com:1234', 'did:tdw:5678:example.com:5678'], + witnessThreshold: 1 + }); + + expect(meta.witnesses).toHaveLength(2); + expect(meta.witnessThreshold).toBe(1); + expect(log[0][4]!.length).toBe(3); +}); + +test("Update DID with witnesses", async () => { + mock("https://example.com/1234/witness", { method: "POST", response: { data: {proof: { + type: "DataIntegrityProof", + cryptosuite: "eddsa-jcs-2022", + verificationMethod: "did:tdw:1234:example.com:1234#key1", + created: "2023-06-18T21:19:10Z", + proofValue: "z58xkL6dbDRJjFVkBxhNHXNHFnZzZk...", + proofPurpose: "authentication" + } } }}); + mock("https://example.com/5678/witness", { method: "POST", response: { data: {proof: { + type: "DataIntegrityProof", + cryptosuite: "eddsa-jcs-2022", + verificationMethod: "did:tdw:5678:example.com:5678#key1", + created: 
"2023-06-18T21:19:10Z", + proofValue: "z58xkL6dbDRJjFVkBxhNHXNHFnZzZk...", + proofPurpose: "authentication" + } } }}); + const authKey = await generateEd25519VerificationMethod('authentication'); + const { did, doc, meta, log } = await createDID({ + domain: 'example.com', + signer: createSigner(authKey), + updateKeys: [authKey.publicKeyMultibase!], + verificationMethods: [authKey] + }); + + const { doc: updatedDoc, meta: updatedMeta, log: updatedLog } = await updateDID({ + log, + signer: createSigner(authKey), + updateKeys: [authKey.publicKeyMultibase!], + witnesses: ['did:tdw:1234:example.com:1234', 'did:tdw:5678:example.com:5678'], + witnessThreshold: 2 + }); + + expect(updatedMeta.witnesses).toHaveLength(2); + expect(updatedMeta.witnessThreshold).toBe(2); + expect(updatedLog[updatedLog.length - 1][4]!.length).toBe(1); +}); + +// test("Resolve DID with invalid witness proofs", async () => { +// // ... setup code to create DID with witnesses + +// // Modify the log to have invalid witness proofs +// const invalidLog = [...initialLog]; +// invalidLog[invalidLog.length - 1][5] = []; // Empty witness proofs + +// await expect(resolveDID(invalidLog)).rejects.toThrow('Invalid witness proofs'); +// }); \ No newline at end of file diff --git a/test/fixtures/not-authorized.log b/test/fixtures/not-authorized.log index c80ac8d..f3e3319 100644 --- a/test/fixtures/not-authorized.log +++ b/test/fixtures/not-authorized.log @@ -1,2 +1,2 @@ 
-["1-QmVkYA5MHH4qBuggwjCqRbZMpbCHxq3TbQVcKKQrJ9ve1L","2024-08-13T21:22:59Z",{"method":"did:tdw:0.3","scid":"QmVkYA5MHH4qBuggwjCqRbZMpbCHxq3TbQVcKKQrJ9ve1L","updateKeys":["z6Mku2zKKQWkYqT3nR8yMtsooALsTGP8Dm2vF6A1uhFck4UK"],"portable":false},{"value":{"@context":["https://www.w3.org/ns/did/v1","https://w3id.org/security/multikey/v1"],"id":"did:tdw:QmVkYA5MHH4qBuggwjCqRbZMpbCHxq3TbQVcKKQrJ9ve1L:example.com","controller":"did:tdw:QmVkYA5MHH4qBuggwjCqRbZMpbCHxq3TbQVcKKQrJ9ve1L:example.com","authentication":["did:tdw:QmVkYA5MHH4qBuggwjCqRbZMpbCHxq3TbQVcKKQrJ9ve1L:example.com#uhFck4UK"],"assertionMethod":["did:tdw:QmVkYA5MHH4qBuggwjCqRbZMpbCHxq3TbQVcKKQrJ9ve1L:example.com#h2igGFWF"],"verificationMethod":[{"id":"did:tdw:QmVkYA5MHH4qBuggwjCqRbZMpbCHxq3TbQVcKKQrJ9ve1L:example.com#uhFck4UK","controller":"did:tdw:QmVkYA5MHH4qBuggwjCqRbZMpbCHxq3TbQVcKKQrJ9ve1L:example.com","type":"Multikey","publicKeyMultibase":"z6Mku2zKKQWkYqT3nR8yMtsooALsTGP8Dm2vF6A1uhFck4UK"},{"id":"did:tdw:QmVkYA5MHH4qBuggwjCqRbZMpbCHxq3TbQVcKKQrJ9ve1L:example.com#h2igGFWF","controller":"did:tdw:QmVkYA5MHH4qBuggwjCqRbZMpbCHxq3TbQVcKKQrJ9ve1L:example.com","type":"Multikey","publicKeyMultibase":"z6MknGUfb5zDc9iiCm2SHwvHmqhWkcVSGL2txLQLh2igGFWF"}]}},[{"type":"DataIntegrityProof","cryptosuite":"eddsa-jcs-2022","verificationMethod":"did:key:z6Mku2zKKQWkYqT3nR8yMtsooALsTGP8Dm2vF6A1uhFck4UK","created":"2024-08-13T21:22:59Z","proofPurpose":"authentication","challenge":"1-QmVkYA5MHH4qBuggwjCqRbZMpbCHxq3TbQVcKKQrJ9ve1L","proofValue":"z4cdGwyviPCheRx17T6uvSsspNQ9PYQsSXXizgtT9rd4Kso5zSUBcJAEZAKVEDQ2CoFS1SunipV8zzzCACwSEbAJq"}]] 
-["2-QmWqUoM6gxmkhR3JCQF38XfJXf3tLgGE27jNgh6TuMGt1f","2024-08-13T21:22:59Z",{},{"patch":[{"op":"replace","path":"/verificationMethod/1/publicKeyMultibase","value":"z6MkiYFyZeQLQKXJ55oo8EhXApW1JuGsvaVis9daBDLxaaSt"},{"op":"replace","path":"/verificationMethod/1/id","value":"did:tdw:QmVkYA5MHH4qBuggwjCqRbZMpbCHxq3TbQVcKKQrJ9ve1L:example.com#BDLxaaSt"},{"op":"replace","path":"/verificationMethod/0/publicKeyMultibase","value":"z6MkoBgGDK7pncArQP6edbRQDuh3n8SdYrTStxzUjCAMbfHQ"},{"op":"replace","path":"/verificationMethod/0/id","value":"did:tdw:QmVkYA5MHH4qBuggwjCqRbZMpbCHxq3TbQVcKKQrJ9ve1L:example.com#jCAMbfHQ"},{"op":"replace","path":"/assertionMethod/0","value":"did:tdw:QmVkYA5MHH4qBuggwjCqRbZMpbCHxq3TbQVcKKQrJ9ve1L:example.com#BDLxaaSt"},{"op":"replace","path":"/authentication/0","value":"did:tdw:QmVkYA5MHH4qBuggwjCqRbZMpbCHxq3TbQVcKKQrJ9ve1L:example.com#jCAMbfHQ"},{"op":"replace","path":"/controller","value":["did:tdw:QmVkYA5MHH4qBuggwjCqRbZMpbCHxq3TbQVcKKQrJ9ve1L:example.com"]}]},[{"type":"DataIntegrityProof","cryptosuite":"eddsa-jcs-2022","verificationMethod":"did:key:z6MknGUfb5zDc9iiCm2SHwvHmqhWkcVSGL2txLQLh2igGFWF","created":"2024-08-13T21:22:59Z","proofPurpose":"authentication","challenge":"2-QmWqUoM6gxmkhR3JCQF38XfJXf3tLgGE27jNgh6TuMGt1f","proofValue":"z78GaGKF3UUik3pCpizHG8XqWLJxrzSwX5cdJEAszjB6XaUccFxeH8j6BybneUF7w2m94RQ5GbzEkzwG96WipoZA"}]] 
+["1-QmdK2cazkgA3Qxr3oUdZzZyJQ9tuqXrcELSbqbff2wgJU5","2024-10-04T06:34:25Z",{"method":"did:tdw:0.3","scid":"QmdK2cazkgA3Qxr3oUdZzZyJQ9tuqXrcELSbqbff2wgJU5","updateKeys":["z6MkpM4hdAMAFSf4oAZ3r9Pz5cSzZAEb9AxKbWNx7PEdLthC"],"portable":false,"prerotation":false,"nextKeyHashes":[],"witnesses":[],"witnessThreshold":0,"deactivated":false},{"value":{"@context":["https://www.w3.org/ns/did/v1","https://w3id.org/security/multikey/v1"],"id":"did:tdw:QmdK2cazkgA3Qxr3oUdZzZyJQ9tuqXrcELSbqbff2wgJU5:example.com","controller":"did:tdw:QmdK2cazkgA3Qxr3oUdZzZyJQ9tuqXrcELSbqbff2wgJU5:example.com","authentication":["did:tdw:QmdK2cazkgA3Qxr3oUdZzZyJQ9tuqXrcELSbqbff2wgJU5:example.com#7PEdLthC"],"assertionMethod":["did:tdw:QmdK2cazkgA3Qxr3oUdZzZyJQ9tuqXrcELSbqbff2wgJU5:example.com#ko8yVqZf"],"verificationMethod":[{"id":"did:tdw:QmdK2cazkgA3Qxr3oUdZzZyJQ9tuqXrcELSbqbff2wgJU5:example.com#7PEdLthC","controller":"did:tdw:QmdK2cazkgA3Qxr3oUdZzZyJQ9tuqXrcELSbqbff2wgJU5:example.com","type":"Multikey","publicKeyMultibase":"z6MkpM4hdAMAFSf4oAZ3r9Pz5cSzZAEb9AxKbWNx7PEdLthC"},{"id":"did:tdw:QmdK2cazkgA3Qxr3oUdZzZyJQ9tuqXrcELSbqbff2wgJU5:example.com#ko8yVqZf","controller":"did:tdw:QmdK2cazkgA3Qxr3oUdZzZyJQ9tuqXrcELSbqbff2wgJU5:example.com","type":"Multikey","publicKeyMultibase":"z6MktbW9Q3SRRZH9KGgi992FAP3zsHbRNRVghua1ko8yVqZf"}]}},[{"type":"DataIntegrityProof","cryptosuite":"eddsa-jcs-2022","verificationMethod":"did:key:z6MkpM4hdAMAFSf4oAZ3r9Pz5cSzZAEb9AxKbWNx7PEdLthC","created":"2024-10-04T06:34:25Z","proofPurpose":"authentication","challenge":"1-QmdK2cazkgA3Qxr3oUdZzZyJQ9tuqXrcELSbqbff2wgJU5","proofValue":"z2dUb12ES25JLmJ6STDD6LKvwS3Ro8KPHp7L1oixyitcJy8CwqK4NjEWimMmGKGz7vYz5Ryod9JL34voVXsMnFNzk"}]] 
+["2-Qma6VjkU24hF7mvnjEWc1eycwxnD3PQzrGHrm6que5wcTY","2024-10-04T06:34:25Z",{"witnesses":[],"witnessThreshold":0},{"patch":[{"op":"replace","path":"/verificationMethod/1/publicKeyMultibase","value":"z6MkoiGV33WxKg5eGAB92eekgropaE98PUqRW92k2GfKGfca"},{"op":"replace","path":"/verificationMethod/1/id","value":"did:tdw:QmdK2cazkgA3Qxr3oUdZzZyJQ9tuqXrcELSbqbff2wgJU5:example.com#2GfKGfca"},{"op":"replace","path":"/verificationMethod/0/publicKeyMultibase","value":"z6MksuN1RkD78bPEnbc8UdFAetgKjKHJyVu6NypGfJypQe4T"},{"op":"replace","path":"/verificationMethod/0/id","value":"did:tdw:QmdK2cazkgA3Qxr3oUdZzZyJQ9tuqXrcELSbqbff2wgJU5:example.com#fJypQe4T"},{"op":"replace","path":"/assertionMethod/0","value":"did:tdw:QmdK2cazkgA3Qxr3oUdZzZyJQ9tuqXrcELSbqbff2wgJU5:example.com#2GfKGfca"},{"op":"replace","path":"/authentication/0","value":"did:tdw:QmdK2cazkgA3Qxr3oUdZzZyJQ9tuqXrcELSbqbff2wgJU5:example.com#fJypQe4T"},{"op":"replace","path":"/controller","value":["did:tdw:QmdK2cazkgA3Qxr3oUdZzZyJQ9tuqXrcELSbqbff2wgJU5:example.com"]}]},[{"type":"DataIntegrityProof","cryptosuite":"eddsa-jcs-2022","verificationMethod":"did:key:z6MktbW9Q3SRRZH9KGgi992FAP3zsHbRNRVghua1ko8yVqZf","created":"2024-10-04T06:34:25Z","proofPurpose":"authentication","challenge":"2-Qma6VjkU24hF7mvnjEWc1eycwxnD3PQzrGHrm6que5wcTY","proofValue":"z4r9YokjSkvKAnt9DgMDN8CwvKAgYG1G5tHcydJdFGFDkpsdJio2hTGpgPHvLstBjAFeFH5x24jviuBD4u7AHxKu2"}]] diff --git a/test/happy-path.test.ts b/test/happy-path.test.ts index 4a803c0..f775b98 100644 --- a/test/happy-path.test.ts +++ b/test/happy-path.test.ts @@ -1,7 +1,7 @@ import { test, expect, beforeAll } from "bun:test"; import { createDID, deactivateDID, resolveDID, updateDID } from "../src/method"; import fs from 'node:fs'; -import { readLogFromDisk } from "./utils"; +import { readLogFromDisk } from "../src/utils"; import { createVMID, deriveHash } from "../src/utils"; import { METHOD } from "../src/constants"; import { createSigner, generateEd25519VerificationMethod, 
generateX25519VerificationMethod } from "../src/cryptography"; diff --git a/test/must.test.ts b/test/must.test.ts index 6159b2a..8d1d076 100644 --- a/test/must.test.ts +++ b/test/must.test.ts @@ -63,8 +63,8 @@ describe("did:tdw normative tests", async () => { expect(resolved.meta.deactivated).toBe(true); }); - test("Resolver encountering 'deactivated': true MUST return deactivated in metadata (negative)", async () => { + test("Resolver encountering 'deactivated': false MUST return deactivated in metadata (negative)", async () => { const resolved = await resolveDID(newLog1); - expect(resolved.meta.deactivated).toBeUndefined(); + expect(resolved.meta.deactivated).toBeFalse(); }); }); diff --git a/test/not-so-happy-path.test.ts b/test/not-so-happy-path.test.ts index 06b6fe0..6040356 100644 --- a/test/not-so-happy-path.test.ts +++ b/test/not-so-happy-path.test.ts @@ -1,5 +1,5 @@ import { expect, test } from "bun:test"; -import { readLogFromDisk, writeLogToDisk } from "./utils"; +import { readLogFromDisk, writeLogToDisk } from "../src/utils"; import { createDID, resolveDID, updateDID } from "../src/method"; import { createSigner, generateEd25519VerificationMethod } from "../src/cryptography"; diff --git a/test/resolve.http b/test/resolve.http index 2ee4daf..d5020f5 100644 --- a/test/resolve.http +++ b/test/resolve.http @@ -1,5 +1,8 @@ +http://localhost:8000/health -http://localhost:8000/e9q26uqq5hg20yydmzq63fkcx7af +### + +http://localhost:8000/.well-known/did.jsonl ### diff --git a/test/resolve.test.ts b/test/resolve.test.ts new file mode 100644 index 0000000..7e03b60 --- /dev/null +++ b/test/resolve.test.ts @@ -0,0 +1,108 @@ +import { describe, expect, test, beforeAll } from "bun:test"; +import { createDID, resolveDID, updateDID } from "../src/method"; +import { createSigner, generateEd25519VerificationMethod, generateX25519VerificationMethod } from "../src/cryptography"; +import { clone } from "../src/utils"; + +describe("resolveDID with verificationMethod", () => { 
+ let initialDID: string; + let fullLog: DIDLog; + let authKey1: VerificationMethod, authKey2: VerificationMethod, keyAgreementKey: VerificationMethod; + + beforeAll(async () => { + authKey1 = await generateEd25519VerificationMethod('authentication'); + authKey2 = await generateEd25519VerificationMethod('authentication'); + keyAgreementKey = await generateX25519VerificationMethod('keyAgreement'); + + // Create initial DID + const { did, log } = await createDID({ + domain: 'example.com', + signer: createSigner(authKey1), + updateKeys: [authKey1.publicKeyMultibase!], + verificationMethods: [authKey1], + created: new Date('2023-01-01T00:00:00Z') + }); + initialDID = did; + fullLog = clone(log); + + // Update DID to add a new authentication key + const updateResult1 = await updateDID({ + log: fullLog, + signer: createSigner(authKey1), + updateKeys: [authKey1.publicKeyMultibase!], + verificationMethods: [authKey1, authKey2], + updated: new Date('2023-02-01T00:00:00Z') + }); + fullLog = updateResult1.log; + + // Update DID to add a keyAgreement key + const updateResult2 = await updateDID({ + log: fullLog, + signer: createSigner(authKey1), + updateKeys: [authKey1.publicKeyMultibase!], + verificationMethods: [authKey1, authKey2, keyAgreementKey], + updated: new Date('2023-03-01T00:00:00Z') + }); + fullLog = updateResult2.log; + }); + + test("Resolve DID with initial authentication key", async () => { + const vmId = `${initialDID}#${authKey1.publicKeyMultibase!.slice(-8)}`; + const { doc, meta } = await resolveDID(fullLog, { verificationMethod: vmId }); + + expect(doc.verificationMethod).toHaveLength(1); + expect(doc.verificationMethod[0].publicKeyMultibase).toBe(authKey1.publicKeyMultibase); + expect(meta.versionId.split('-')[0]).toBe("1"); + }); + + test("Resolve DID with second authentication key", async () => { + const vmId = `${initialDID}#${authKey2.publicKeyMultibase!.slice(-8)}`; + const { doc, meta } = await resolveDID(fullLog, { verificationMethod: vmId }); + + 
expect(doc.verificationMethod).toHaveLength(2); + expect(doc.verificationMethod[1].publicKeyMultibase).toBe(authKey2.publicKeyMultibase); + expect(meta.versionId.split('-')[0]).toBe("2"); + }); + + test("Resolve DID with keyAgreement key", async () => { + const vmId = `${initialDID}#${keyAgreementKey.publicKeyMultibase!.slice(-8)}`; + const { doc, meta } = await resolveDID(fullLog, { verificationMethod: vmId }); + + expect(doc.verificationMethod).toHaveLength(3); + expect(doc.verificationMethod[2].publicKeyMultibase).toBe(keyAgreementKey.publicKeyMultibase); + expect(meta.versionId.split('-')[0]).toBe("3"); + }); + + test("Resolve DID with non-existent verification method", async () => { + const vmId = `${initialDID}#nonexistent`; + await expect(resolveDID(fullLog, { verificationMethod: vmId })).rejects.toThrow("DID with options"); + }); + + test("Resolve DID with verification method and version time", async () => { + const vmId = `${initialDID}#${authKey2.publicKeyMultibase!.slice(-8)}`; + const { doc, meta } = await resolveDID(fullLog, { + verificationMethod: vmId, + versionTime: new Date('2023-02-15T00:00:00Z') + }); + + expect(doc.verificationMethod).toHaveLength(2); + expect(doc.verificationMethod[1].publicKeyMultibase).toBe(authKey2.publicKeyMultibase); + expect(meta.versionId.split('-')[0]).toBe("2"); + }); + + test("Throw error when both verificationMethod and versionNumber are specified", async () => { + const vmId = `${initialDID}#${authKey1.publicKeyMultibase!.slice(-8)}`; + let error: Error | null = null; + + try { + await resolveDID(fullLog, { + verificationMethod: vmId, + versionNumber: 2 + }); + } catch (e) { + error = e as Error; + } + + expect(error).not.toBeNull(); + expect(error?.message).toBe("Cannot specify both verificationMethod and version number/id"); + }); +}); \ No newline at end of file diff --git a/test/utils.ts b/test/utils.ts index 858877e..f3cbb70 100644 --- a/test/utils.ts +++ b/test/utils.ts @@ -1,17 +1,5 @@ -import fs from 
'node:fs'; import { deriveHash } from '../src/utils'; -export const readLogFromDisk = (path: string): DIDLog => { - return fs.readFileSync(path, 'utf8').trim().split('\n').map(l => JSON.parse(l)); -} - -export const writeLogToDisk = (path: string, log: DIDLog) => { - fs.writeFileSync(path, JSON.stringify(log.shift()) + '\n'); - for (const entry of log) { - fs.appendFileSync(path, JSON.stringify(entry) + '\n'); - } -} - export function createMockDIDLog(entries: Partial<DIDLogEntry>[]): DIDLog { return entries.map((entry, index) => { const versionNumber = index + 1; @@ -24,4 +12,4 @@ export function createMockDIDLog(entries: Partial<DIDLogEntry>[]): DIDLog { ]; return mockEntry; }); -} \ No newline at end of file +} diff --git a/test/witness.test.ts b/test/witness.test.ts new file mode 100644 index 0000000..10cda0d --- /dev/null +++ b/test/witness.test.ts @@ -0,0 +1,117 @@ +import { beforeAll, describe, expect, test } from "bun:test"; +import { createDID, resolveDID, updateDID } from "../src/method"; +import { createSigner, generateEd25519VerificationMethod } from "../src/cryptography"; + +let WITNESS_SCID = ""; +const WITNESS_SERVER_URL = "http://localhost:8000"; // Update this to match your witness server URL +const WITNESS_DOMAIN = WITNESS_SERVER_URL.split('//')[1].replace(':', '%3A'); + +const getWitnessDID = async () => { + try { + const response = await fetch(`${WITNESS_SERVER_URL}/.well-known/did.jsonl`); + return response.ok && (await response.json()); + } catch (error) { + return false; + } +} + +const isWitnessServerRunning = async () => { + try { + const response = await fetch(`${WITNESS_SERVER_URL}/health`); + return response.ok; + } catch (error) { + return false; + } +}; + +const runWitnessTests = async () => { + const serverRunning = await isWitnessServerRunning(); + + if (!serverRunning) { + describe("Witness functionality", () => { + test.skip("Witness server is not running", () => { + // This test will be skipped and shown in the test output + }); + }); + return; + } + 
describe("Witness functionality", () => { + let authKey: VerificationMethod; + let initialDID: { did: string; doc: any; meta: any; log: DIDLog }; + + beforeAll(async () => { + authKey = await generateEd25519VerificationMethod('authentication'); + const didLog = await getWitnessDID(); + const {did, meta} = await resolveDID([didLog] as DIDLog); + WITNESS_SCID = meta.scid; + console.log(`Witness DID ${did} found`); + }); + + test("Create DID with witness", async () => { + const domain = WITNESS_SERVER_URL.split('//')[1].replace(':', '%3A'); + initialDID = await createDID({ + domain, + signer: createSigner(authKey), + updateKeys: [authKey.publicKeyMultibase!], + verificationMethods: [authKey], + witnesses: [`did:tdw:${WITNESS_SCID}:${WITNESS_DOMAIN}`], + witnessThreshold: 1 + }); + const resolved = await resolveDID(initialDID.log); + + expect(resolved.did).toBe(initialDID.did); + expect(initialDID.meta.witnesses).toHaveLength(1); + expect(initialDID.meta.witnessThreshold).toBe(1); + expect(initialDID.log[0][4]).toHaveLength(2); // Controller proof + witness proof + }); + + test("Update DID with witness", async () => { + const newAuthKey = await generateEd25519VerificationMethod('authentication'); + const updatedDID = await updateDID({ + log: initialDID.log, + signer: createSigner(authKey), + updateKeys: [newAuthKey.publicKeyMultibase!], + verificationMethods: [newAuthKey], + }); + + expect(updatedDID.meta.witnesses).toHaveLength(1); + expect(updatedDID.meta.witnessThreshold).toBe(1); + expect(updatedDID.log[updatedDID.log.length - 1][4]).toHaveLength(2); // Controller proof + witness proof + }); + + test("Witness signing with environment variable key", async () => { + if (!process.env.WITNESS_PRIVATE_KEY) { + test.skip("WITNESS_PRIVATE_KEY environment variable not set", () => {}); + return; + } + + const testDID = await createDID({ + domain: 'example.com', + signer: createSigner(authKey), + updateKeys: [authKey.publicKeyMultibase!], + verificationMethods: [authKey], + 
witnesses: [`did:tdw:${WITNESS_SERVER_URL.split('//')[1]}`], + witnessThreshold: 1 + }); + + const response = await fetch(`${WITNESS_SERVER_URL}/witness`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ log: testDID.log }), + }); + + expect(response.ok).toBe(true); + + const data: any = await response.json(); + expect(data.proof).toBeDefined(); + expect(data.proof.type).toBe('DataIntegrityProof'); + expect(data.proof.cryptosuite).toBe('eddsa-jcs-2022'); + expect(data.proof.proofPurpose).toBe('authentication'); + }); + }); +}; + +runWitnessTests(); \ No newline at end of file