Fix proofChain local issuer and implement tests
Signed-off-by: PatStLouis <[email protected]>
PatStLouis committed Dec 16, 2024
1 parent d8b6034 commit f608167
Showing 3 changed files with 35 additions and 27 deletions.
22 changes: 18 additions & 4 deletions tests/75-proof-chains.js
@@ -5,10 +5,13 @@
import {
addProof,
createVc,
generateProofId
} from './vc-issuer/index.js';
import {
setupReportableTestSuite,
setupRow,
verifyError,
verifySuccess
} from './helpers.js';
import {endpoints} from 'vc-test-suite-implementations';

@@ -20,6 +23,7 @@ const {match: issuers} = endpoints.filterByTag({
tags: cryptosuites,
property: 'issuers'
});
issuers;

const {match: verifiers} = endpoints.filterByTag({
tags: cryptosuites,
@@ -34,28 +38,38 @@ describe('Proof Chains', function() {
let issuedCredential;
let issuedProofSet;
let issuedProofChain;
let negativeFixture;
before(async function() {
// signedCredential = await addProof(
// generateCredential());
issuedCredential = await createVc();
issuedProofChain = await addProof(
structuredClone(issuedCredential), issuedCredential.proof[0].id);
issuedProofSet = await addProof(
structuredClone(issuedCredential));
issuedProofChain = await addProof(
structuredClone(issuedProofSet), issuedCredential.proof[0].id);
issuedProofSet;
});
beforeEach(setupRow);
it('If a proof with id value equal to the value of previousProof ' +
'does not exist in allProofs, an error MUST be raised and SHOULD ' +
'convey an error type of PROOF_VERIFICATION_ERROR.',
async function() {
this.test.link = 'https://www.w3.org/TR/vc-data-integrity/#verify-proof-sets-and-chains';
await verifySuccess(verifier, issuedProofChain);

negativeFixture = structuredClone(issuedProofChain);
negativeFixture.proof[1].id = generateProofId();
await verifyError(verifier, negativeFixture);
});
it('If any element of previousProof list has an id attribute ' +
'value that does not match the id attribute value of any ' +
'element of allProofs, an error MUST be raised and SHOULD ' +
'convey an error type of PROOF_VERIFICATION_ERROR.',
async function() {
this.test.link = 'https://www.w3.org/TR/vc-data-integrity/#verify-proof-sets-and-chains';
await verifySuccess(verifier, issuedProofChain);

negativeFixture = structuredClone(issuedProofChain);
negativeFixture.proof[1].id = generateProofId();
await verifyError(verifier, negativeFixture);
});
});
}
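
For context, a minimal sketch of the proof chain fixture these tests exercise, assuming the local issuer's addProof appends a proof whose previousProof references the id of the first proof (ids and property values below are illustrative, not taken from the suite; other required proof properties such as created and verificationMethod are omitted for brevity):

const exampleProofChain = {
  '@context': ['https://www.w3.org/ns/credentials/v2'],
  type: ['VerifiableCredential'],
  credentialSubject: {id: 'did:example:subject'},
  proof: [{
    // first proof; its id is what previousProof must point at
    id: 'urn:uuid:11111111-1111-4111-8111-111111111111',
    type: 'DataIntegrityProof',
    cryptosuite: 'ecdsa-rdfc-2019',
    proofPurpose: 'assertionMethod',
    proofValue: 'z...'
  }, {
    // second proof forms the chain by referencing the first proof's id;
    // the negative fixtures regenerate an id so the reference no longer
    // resolves within allProofs, which MUST raise PROOF_VERIFICATION_ERROR
    id: 'urn:uuid:22222222-2222-4222-8222-222222222222',
    type: 'DataIntegrityProof',
    cryptosuite: 'ecdsa-rdfc-2019',
    proofPurpose: 'assertionMethod',
    previousProof: 'urn:uuid:11111111-1111-4111-8111-111111111111',
    proofValue: 'z...'
  }]
};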
9 changes: 9 additions & 0 deletions tests/vc-issuer/documentLoader.js
@@ -13,6 +13,15 @@ loader.addStatic(
named.get('v2').context
);

loader.addStatic(
'https://www.w3.org/ns/credentials/examples/v2',
{
'@context': {
'@vocab': 'https://www.w3.org/ns/credentials/examples#'
}
}
);

loader.addStatic(
dataIntegrityContext.constants.CONTEXT_URL,
dataIntegrityContext.contexts.get(dataIntegrityContext.constants.CONTEXT_URL)
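
The newly registered examples v2 context lets the local issuer expand credentials that reference https://www.w3.org/ns/credentials/examples/v2 without a network fetch. A rough sketch of how that might be consumed, assuming loader.build() returns a standard jsonld-style document loader that resolves statically registered URLs to a {documentUrl, document} result (this usage is illustrative, not part of the commit):

import {loader} from './documentLoader.js';

const documentLoader = loader.build();
const {document} = await documentLoader(
  'https://www.w3.org/ns/credentials/examples/v2');
// document['@context']['@vocab'] maps unknown terms into the examples
// vocabulary: 'https://www.w3.org/ns/credentials/examples#'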
31 changes: 8 additions & 23 deletions tests/vc-issuer/index.js
@@ -6,20 +6,15 @@ import * as base58 from 'base58-universal';
import * as EcdsaMultikey from '@digitalbazaar/ecdsa-multikey';
import * as rdfCanonize from 'rdf-canonize';
import crypto from 'crypto';
// import {DataIntegrityProof} from '@digitalbazaar/data-integrity';
// import {cryptosuite as ecdsaRdfc2019Cryptosuite} from
// '@digitalbazaar/ecdsa-rdfc-2019-cryptosuite';
// import jsigs from 'jsonld-signatures';
import jsonld from 'jsonld';
// const {purposes: {AssertionProofPurpose}} = jsigs;
import {loader} from './documentLoader.js';

const documentLoader = loader.build();
const publicKeyMultibase = 'zDnaekGZTbQBerwcehBSXLqAg6s55hVEBms1zFy89VHXtJSa9';
const secretKeyMultibase = 'z42tqZ5smVag3DtDhjY9YfVwTMyVHW6SCHJi2ZMrD23DGYS3';
const controller = `did:key:${publicKeyMultibase}`;

function generateProofId() {
export function generateProofId() {
return `urn:uuid:${crypto.randomUUID()}`;
}

@@ -40,11 +35,6 @@ const keyPair = await EcdsaMultikey.from({
secretKeyMultibase
});

// // create suite
// const suite = new DataIntegrityProof({
// signer: keyPair.signer(), cryptosuite: ecdsaRdfc2019Cryptosuite
// });

// create the unsigned credential
const unsignedCredential = {
'@context': ['https://www.w3.org/ns/credentials/v2'],
@@ -58,12 +48,6 @@ export async function createVc() {
}

export async function addProof(credential, previousProof = null) {
// const unsecuredDocument = structuredClone(unsignedCredential);
// const signedDocument = await jsigs.sign(credential, {
// suite,
// purpose: new AssertionProofPurpose(),
// documentLoader
// });
const proofSet = credential?.proof || [];
const unsecuredDocument = structuredClone(credential);
delete unsecuredDocument.proof;
@@ -92,15 +76,15 @@ export async function createProof(unsecuredDocument, options) {

const proofConfig = await canonize(options);
const proofConfigHash =
crypto.createHash('sha256').update(proofConfig).digest();
crypto.createHash('sha256').update(proofConfig).digest('hex');

const transformedData = await canonize(unsecuredDocument);
const transformedDataHash =
crypto.createHash('sha256').update(transformedData).digest();

const hashData = Buffer.concat([proofConfigHash, transformedDataHash]);
crypto.createHash('sha256').update(transformedData).digest('hex');

const proofbytes = await keyPair.signer().sign({data: hashData});
const hashData = proofConfigHash + transformedDataHash;
const proofbytes = await keyPair.signer().sign(
{data: Uint8Array.from(Buffer.from(hashData, 'hex'))});

proof.proofValue = `z${base58.encode(proofbytes)}`;

@@ -111,7 +95,6 @@ async function canonize(input) {
const options = {
algorithm: 'RDFC-1.0',
base: null,
// format: 'application/n-quads',
documentLoader,
safe: true,
skipExpansion: false,
@@ -120,5 +103,7 @@
messageDigestAlgorithm: 'SHA-256',
};
const dataset = await jsonld.toRDF(input, options);
delete options.produceGeneralizedRdf;
options.format = 'application/n-quads';
return rdfCanonize.canonize(dataset, options);
}
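
A side note on the hashing change in createProof: concatenating the two hex digests and decoding them back to bytes yields the same 64 signing bytes as concatenating the raw digest buffers, so the signed data is unchanged. A small standalone check using Node's crypto module (the canonical N-Quads strings are placeholders, not real suite output):

import crypto from 'crypto';

const proofConfig = '<canonical N-Quads of the proof options>';
const transformedData = '<canonical N-Quads of the unsecured document>';

// earlier approach shown in the diff: concatenate raw 32-byte digests
const rawConcat = Buffer.concat([
  crypto.createHash('sha256').update(proofConfig).digest(),
  crypto.createHash('sha256').update(transformedData).digest()
]);

// committed approach: concatenate hex digests, then decode back to bytes
const hexConcat = Buffer.from(
  crypto.createHash('sha256').update(proofConfig).digest('hex') +
  crypto.createHash('sha256').update(transformedData).digest('hex'),
  'hex');

console.log(rawConcat.equals(hexConcat)); // true, both are 64 bytes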
