diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 0000000..d6f3acf
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,5 @@
+{
+ "cSpell.words": [
+ "unlinkable"
+ ]
+}
\ No newline at end of file
diff --git a/TestVectors/BBSKeyMaterial.json b/TestVectors/BBSKeyMaterial.json
new file mode 100644
index 0000000..23c458a
--- /dev/null
+++ b/TestVectors/BBSKeyMaterial.json
@@ -0,0 +1,5 @@
+{
+ "publicKeyHex": "a4ef1afa3da575496f122b9b78b8c24761531a8a093206ae7c45b80759c168ba4f7a260f9c3367b6c019b4677841104b10665edbe70ba3ebe7d9cfbffbf71eb016f70abfbb163317f372697dc63efd21fc55764f63926a8f02eaea325a2a888f",
+ "privateKeyHex": "66d36e118832af4c5e28b2dfe1b9577857e57b042a33e06bdea37b811ed09ee0",
+ "hmacKeyString": "00112233445566778899AABBCCDDEEFF00112233445566778899AABBCCDDEEFF"
+}
\ No newline at end of file
diff --git a/TestVectors/addBaseDocCanon.json b/TestVectors/addBaseDocCanon.json
new file mode 100644
index 0000000..4d17034
--- /dev/null
+++ b/TestVectors/addBaseDocCanon.json
@@ -0,0 +1,29 @@
+[
+ "_:c14n0 \"CompFoil170\" .\n",
+ "_:c14n0 \"Wailea\" .\n",
+ "_:c14n0 \"2022\"^^ .\n",
+ "_:c14n1 \"Lahaina\" .\n",
+ "_:c14n1 \"7.8E0\"^^ .\n",
+ "_:c14n1 \"2023\"^^ .\n",
+ "_:c14n2 .\n",
+ "_:c14n2 _:c14n6 .\n",
+ "_:c14n3 \"Kanaha Custom\" .\n",
+ "_:c14n3 \"Wailea\" .\n",
+ "_:c14n3 \"2019\"^^ .\n",
+ "_:c14n4 \"Lahaina\" .\n",
+ "_:c14n4 \"7\"^^ .\n",
+ "_:c14n4 \"2020\"^^ .\n",
+ "_:c14n5 \"Kihei\" .\n",
+ "_:c14n5 \"5.5E0\"^^ .\n",
+ "_:c14n5 \"2023\"^^ .\n",
+ "_:c14n6 _:c14n0 .\n",
+ "_:c14n6 _:c14n3 .\n",
+ "_:c14n6 \"Earth101\" .\n",
+ "_:c14n6 _:c14n1 .\n",
+ "_:c14n6 _:c14n4 .\n",
+ "_:c14n6 _:c14n5 .\n",
+ "_:c14n6 _:c14n7 .\n",
+ "_:c14n7 \"Lahaina\" .\n",
+ "_:c14n7 \"6.1E0\"^^ .\n",
+ "_:c14n7 \"2023\"^^ .\n"
+]
\ No newline at end of file
diff --git a/TestVectors/addBaseDocHMACCanon.json b/TestVectors/addBaseDocHMACCanon.json
new file mode 100644
index 0000000..9247374
--- /dev/null
+++ b/TestVectors/addBaseDocHMACCanon.json
@@ -0,0 +1,29 @@
+[
+ "_:b0 \"Lahaina\" .\n",
+ "_:b0 \"6.1E0\"^^ .\n",
+ "_:b0 \"2023\"^^ .\n",
+ "_:b1 \"Lahaina\" .\n",
+ "_:b1 \"7.8E0\"^^ .\n",
+ "_:b1 \"2023\"^^ .\n",
+ "_:b2 \"CompFoil170\" .\n",
+ "_:b2 \"Wailea\" .\n",
+ "_:b2 \"2022\"^^ .\n",
+ "_:b3 \"Kihei\" .\n",
+ "_:b3 \"5.5E0\"^^ .\n",
+ "_:b3 \"2023\"^^ .\n",
+ "_:b4 .\n",
+ "_:b4 _:b6 .\n",
+ "_:b5 \"Lahaina\" .\n",
+ "_:b5 \"7\"^^ .\n",
+ "_:b5 \"2020\"^^ .\n",
+ "_:b6 _:b2 .\n",
+ "_:b6 _:b7 .\n",
+ "_:b6 \"Earth101\" .\n",
+ "_:b6 _:b0 .\n",
+ "_:b6 _:b1 .\n",
+ "_:b6 _:b3 .\n",
+ "_:b6 _:b5 .\n",
+ "_:b7 \"Kanaha Custom\" .\n",
+ "_:b7 \"Wailea\" .\n",
+ "_:b7 \"2019\"^^ .\n"
+]
\ No newline at end of file
diff --git a/TestVectors/addBaseTransform.json b/TestVectors/addBaseTransform.json
new file mode 100644
index 0000000..d47f432
--- /dev/null
+++ b/TestVectors/addBaseTransform.json
@@ -0,0 +1,127 @@
+{
+ "mandatoryPointers": [
+ "/credentialSubject/sailNumber",
+ "/credentialSubject/sails/1",
+ "/credentialSubject/boards/0/year",
+ "/credentialSubject/sails/2"
+ ],
+ "mandatory": {
+ "dataType": "Map",
+ "value": [
+ [
+ 0,
+ "_:b0 \"Lahaina\" .\n"
+ ],
+ [
+ 1,
+ "_:b0 \"6.1E0\"^^ .\n"
+ ],
+ [
+ 2,
+ "_:b0 \"2023\"^^ .\n"
+ ],
+ [
+ 8,
+ "_:b2 \"2022\"^^ .\n"
+ ],
+ [
+ 12,
+ "_:b4 .\n"
+ ],
+ [
+ 13,
+ "_:b4 _:b6 .\n"
+ ],
+ [
+ 14,
+ "_:b5 \"Lahaina\" .\n"
+ ],
+ [
+ 15,
+ "_:b5 \"7\"^^ .\n"
+ ],
+ [
+ 16,
+ "_:b5 \"2020\"^^ .\n"
+ ],
+ [
+ 17,
+ "_:b6 _:b2 .\n"
+ ],
+ [
+ 19,
+ "_:b6 \"Earth101\" .\n"
+ ],
+ [
+ 20,
+ "_:b6 _:b0 .\n"
+ ],
+ [
+ 23,
+ "_:b6 _:b5 .\n"
+ ]
+ ]
+ },
+ "nonMandatory": {
+ "dataType": "Map",
+ "value": [
+ [
+ 3,
+ "_:b1 \"Lahaina\" .\n"
+ ],
+ [
+ 4,
+ "_:b1 \"7.8E0\"^^ .\n"
+ ],
+ [
+ 5,
+ "_:b1 \"2023\"^^ .\n"
+ ],
+ [
+ 6,
+ "_:b2 \"CompFoil170\" .\n"
+ ],
+ [
+ 7,
+ "_:b2 \"Wailea\" .\n"
+ ],
+ [
+ 9,
+ "_:b3 \"Kihei\" .\n"
+ ],
+ [
+ 10,
+ "_:b3 \"5.5E0\"^^ .\n"
+ ],
+ [
+ 11,
+ "_:b3 \"2023\"^^ .\n"
+ ],
+ [
+ 18,
+ "_:b6 _:b7 .\n"
+ ],
+ [
+ 21,
+ "_:b6 _:b1 .\n"
+ ],
+ [
+ 22,
+ "_:b6 _:b3 .\n"
+ ],
+ [
+ 24,
+ "_:b7 \"Kanaha Custom\" .\n"
+ ],
+ [
+ 25,
+ "_:b7 \"Wailea\" .\n"
+ ],
+ [
+ 26,
+ "_:b7 \"2019\"^^ .\n"
+ ]
+ ]
+ },
+ "hmacKeyString": "00112233445566778899AABBCCDDEEFF00112233445566778899AABBCCDDEEFF"
+}
\ No newline at end of file
diff --git a/TestVectors/addHashData.json b/TestVectors/addHashData.json
new file mode 100644
index 0000000..41b410e
--- /dev/null
+++ b/TestVectors/addHashData.json
@@ -0,0 +1,4 @@
+{
+ "proofHash": "109514ed8101a836d240819e30630f48639bf7f1f247074e928eaad99e5775d4",
+ "mandatoryHash": "e8bf46bff3db96eabc3a9410795dc94bc3537165e082f4a3e58841982fd7d4b3"
+}
\ No newline at end of file
diff --git a/TestVectors/addPointerValues.json b/TestVectors/addPointerValues.json
new file mode 100644
index 0000000..276b357
--- /dev/null
+++ b/TestVectors/addPointerValues.json
@@ -0,0 +1,26 @@
+[
+ {
+ "pointer": "/sailNumber",
+ "value": "Earth101"
+ },
+ {
+ "pointer": "/sails/1",
+ "value": {
+ "size": 6.1,
+ "sailName": "Lahaina",
+ "year": 2023
+ }
+ },
+ {
+ "pointer": "/boards/0/year",
+ "value": 2022
+ },
+ {
+ "pointer": "/sails/2",
+ "value": {
+ "size": 7,
+ "sailName": "Lahaina",
+ "year": 2020
+ }
+ }
+]
\ No newline at end of file
diff --git a/TestVectors/addProofConfig.json b/TestVectors/addProofConfig.json
new file mode 100644
index 0000000..6efbe4e
--- /dev/null
+++ b/TestVectors/addProofConfig.json
@@ -0,0 +1,13 @@
+{
+ "type": "DataIntegrityProof",
+ "cryptosuite": "bbs-2023",
+ "created": "2023-08-15T23:36:38Z",
+ "verificationMethod": "did:key:zUC7DerdEmfZ8f4pFajXgGwJoMkV1ofMTmEG5UoNvnWiPiLuGKNeqgRpLH2TV4Xe5mJ2cXV76gRN7LFQwapF1VFu6x2yrr5ci1mXqC1WNUrnHnLgvfZfMH7h6xP6qsf9EKRQrPQ#zUC7DerdEmfZ8f4pFajXgGwJoMkV1ofMTmEG5UoNvnWiPiLuGKNeqgRpLH2TV4Xe5mJ2cXV76gRN7LFQwapF1VFu6x2yrr5ci1mXqC1WNUrnHnLgvfZfMH7h6xP6qsf9EKRQrPQ",
+ "proofPurpose": "assertionMethod",
+ "@context": [
+ "https://www.w3.org/ns/credentials/v2",
+ {
+ "@vocab": "https://windsurf.grotto-networking.com/selective#"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/TestVectors/addProofConfigCanon.txt b/TestVectors/addProofConfigCanon.txt
new file mode 100644
index 0000000..fb2245c
--- /dev/null
+++ b/TestVectors/addProofConfigCanon.txt
@@ -0,0 +1,5 @@
+_:c14n0 "2023-08-15T23:36:38Z"^^ .
+_:c14n0 .
+_:c14n0 "bbs-2023" .
+_:c14n0 .
+_:c14n0 .
diff --git a/TestVectors/addRawBaseSignatureInfo.json b/TestVectors/addRawBaseSignatureInfo.json
new file mode 100644
index 0000000..769340f
--- /dev/null
+++ b/TestVectors/addRawBaseSignatureInfo.json
@@ -0,0 +1,9 @@
+{
+ "bbsSignature": "93c7abe23fdf4856654bc858e607b7659af82b564340731454884724ec01e25360ac49e39cf0df7631535373042caed256abed6e81884e71a21590fef8dbe07e177dcedd8cfe94e4574c4ab51a22bdf9",
+ "mandatoryPointers": [
+ "/credentialSubject/sailNumber",
+ "/credentialSubject/sails/1",
+ "/credentialSubject/boards/0/year",
+ "/credentialSubject/sails/2"
+ ]
+}
\ No newline at end of file
diff --git a/TestVectors/addSignedSDBase.json b/TestVectors/addSignedSDBase.json
new file mode 100644
index 0000000..8372c62
--- /dev/null
+++ b/TestVectors/addSignedSDBase.json
@@ -0,0 +1,56 @@
+{
+ "@context": [
+ "https://www.w3.org/ns/credentials/v2",
+ {
+ "@vocab": "https://windsurf.grotto-networking.com/selective#"
+ }
+ ],
+ "type": [
+ "VerifiableCredential"
+ ],
+ "credentialSubject": {
+ "sailNumber": "Earth101",
+ "sails": [
+ {
+ "size": 5.5,
+ "sailName": "Kihei",
+ "year": 2023
+ },
+ {
+ "size": 6.1,
+ "sailName": "Lahaina",
+ "year": 2023
+ },
+ {
+ "size": 7,
+ "sailName": "Lahaina",
+ "year": 2020
+ },
+ {
+ "size": 7.8,
+ "sailName": "Lahaina",
+ "year": 2023
+ }
+ ],
+ "boards": [
+ {
+ "boardName": "CompFoil170",
+ "brand": "Wailea",
+ "year": 2022
+ },
+ {
+ "boardName": "Kanaha Custom",
+ "brand": "Wailea",
+ "year": 2019
+ }
+ ]
+ },
+ "proof": {
+ "type": "DataIntegrityProof",
+ "cryptosuite": "bbs-2023",
+ "created": "2023-08-15T23:36:38Z",
+ "verificationMethod": "did:key:zUC7DerdEmfZ8f4pFajXgGwJoMkV1ofMTmEG5UoNvnWiPiLuGKNeqgRpLH2TV4Xe5mJ2cXV76gRN7LFQwapF1VFu6x2yrr5ci1mXqC1WNUrnHnLgvfZfMH7h6xP6qsf9EKRQrPQ#zUC7DerdEmfZ8f4pFajXgGwJoMkV1ofMTmEG5UoNvnWiPiLuGKNeqgRpLH2TV4Xe5mJ2cXV76gRN7LFQwapF1VFu6x2yrr5ci1mXqC1WNUrnHnLgvfZfMH7h6xP6qsf9EKRQrPQ",
+ "proofPurpose": "assertionMethod",
+ "proofValue": "u2V0Cg9hAWFCTx6viP99IVmVLyFjmB7dlmvgrVkNAcxRUiEck7AHiU2CsSeOc8N92MVNTcwQsrtJWq-1ugYhOcaIVkP742-B-F33O3Yz-lORXTEq1GiK9-dhAWCAAESIzRFVmd4iZqrvM3e7_ABEiM0RVZneImaq7zN3u_4R4HS9jcmVkZW50aWFsU3ViamVjdC9zYWlsTnVtYmVyeBovY3JlZGVudGlhbFN1YmplY3Qvc2FpbHMvMXggL2NyZWRlbnRpYWxTdWJqZWN0L2JvYXJkcy8wL3llYXJ4Gi9jcmVkZW50aWFsU3ViamVjdC9zYWlscy8y"
+ }
+}
\ No newline at end of file
diff --git a/TestVectors/derivedAdjIndexes.json b/TestVectors/derivedAdjIndexes.json
new file mode 100644
index 0000000..63b3dae
--- /dev/null
+++ b/TestVectors/derivedAdjIndexes.json
@@ -0,0 +1,4 @@
+{
+ "adjMandatoryIndexes":[0,1,2,5,6,7,8,9,10,11,13,14,15],
+ "adjSelectiveIndexes":[3,4,8,11,12,13]
+}
\ No newline at end of file
diff --git a/TestVectors/derivedDisclosureData.json b/TestVectors/derivedDisclosureData.json
new file mode 100644
index 0000000..861265d
--- /dev/null
+++ b/TestVectors/derivedDisclosureData.json
@@ -0,0 +1,8 @@
+{
+ "bbsProof":"b29c719aba8103c713c5facba9b690930ad458816645adc1a53b251010bc3b128d72580239f66ff4e9739e28425794e881b5737fb3abce02b2655d4fb3babebd515685ce7567eab5bd01360e8131150576357509db309294569d822d56e1c581420a8af29b7c7984d50fd5c79a06d64a2586da8a24e93c3742d09f2c0e24d7fe4891927c7ffe408d563a64f586737867a1f020f742fc6eaa1d37eda426c9c75566de8be54822f69749fc462c86caaaf4f9f73ee1b08726f378432e382322a3cc0e87d5b23fc36364bc5c94cfb8a305be6f912bd7152e7a48d4d41571c653d58e5fea8a8238e05aea910e5b62c9d15b8d527c0d59f619fbab6a8799b1ce1da13c6516c23eefc03b247672878c34949943e02f4b3991139276c89a00c4ee64bbce570201ac3502fb4769e6b869919320ad9f3121dfeeecdb2914cfc7d4a386b6153f54b18b4148742ec7b66c81cff0b1de88d2d299f35f2ff817fb422fe0bbf65b5cd7deb939a10cc524f08eff46f31b5631afbd0551d9816e32fb2e4bb7214ce76136057c1298e2a161b5ec3280f0530130ab9600426c7e521d1b893850ae83cf4f211987c93f3a41c16b0cbac29e5dcf88eb65892518f643d5c2acd4888045d4",
+ "labelMap":{"dataType":"Map",
+ "value":[["c14n0","b2"],["c14n1","b4"],["c14n2","b7"],["c14n3","b6"],["c14n4","b5"],["c14n5","b0"]]
+ },
+ "mandatoryIndexes":[0,1,2,5,6,7,8,9,10,11,13,14,15],
+ "adjSelectiveIndexes":[3,4,8,11,12,13]
+}
\ No newline at end of file
diff --git a/TestVectors/derivedGroupIndexes.json b/TestVectors/derivedGroupIndexes.json
new file mode 100644
index 0000000..e19b808
--- /dev/null
+++ b/TestVectors/derivedGroupIndexes.json
@@ -0,0 +1,6 @@
+{
+ "combinedIndexes": [0, 1, 2, 6, 7, 8, 12, 13, 14, 15, 16, 17, 18, 19, 20, 23, 24, 25, 26],
+ "mandatoryIndexes": [0, 1, 2, 8, 12, 13, 14, 15, 16, 17, 19, 20, 23 ],
+ "nonMandatoryIndexes": [3, 4, 5, 6, 7, 9, 10, 11, 18, 21, 22, 24, 25, 26],
+ "selectiveIndexes": [6, 7, 8, 12, 13, 17, 18, 24, 25, 26]
+}
\ No newline at end of file
diff --git a/TestVectors/derivedRecoveredBaseData.json b/TestVectors/derivedRecoveredBaseData.json
new file mode 100644
index 0000000..4a32383
--- /dev/null
+++ b/TestVectors/derivedRecoveredBaseData.json
@@ -0,0 +1,10 @@
+{
+ "bbsSignature": "93c7abe23fdf4856654bc858e607b7659af82b564340731454884724ec01e25360ac49e39cf0df7631535373042caed256abed6e81884e71a21590fef8dbe07e177dcedd8cfe94e4574c4ab51a22bdf9",
+ "hmacKey": "00112233445566778899aabbccddeeff00112233445566778899aabbccddeeff",
+ "mandatoryPointers": [
+ "/credentialSubject/sailNumber",
+ "/credentialSubject/sails/1",
+ "/credentialSubject/boards/0/year",
+ "/credentialSubject/sails/2"
+ ]
+}
\ No newline at end of file
diff --git a/TestVectors/derivedRevealDocument.json b/TestVectors/derivedRevealDocument.json
new file mode 100644
index 0000000..3076d7f
--- /dev/null
+++ b/TestVectors/derivedRevealDocument.json
@@ -0,0 +1,46 @@
+{
+ "@context": [
+ "https://www.w3.org/ns/credentials/v2",
+ {
+ "@vocab": "https://windsurf.grotto-networking.com/selective#"
+ }
+ ],
+ "type": [
+ "VerifiableCredential"
+ ],
+ "credentialSubject": {
+ "sailNumber": "Earth101",
+ "sails": [
+ {
+ "size": 6.1,
+ "sailName": "Lahaina",
+ "year": 2023
+ },
+ {
+ "size": 7,
+ "sailName": "Lahaina",
+ "year": 2020
+ }
+ ],
+ "boards": [
+ {
+ "year": 2022,
+ "boardName": "CompFoil170",
+ "brand": "Wailea"
+ },
+ {
+ "boardName": "Kanaha Custom",
+ "brand": "Wailea",
+ "year": 2019
+ }
+ ]
+ },
+ "proof": {
+ "type": "DataIntegrityProof",
+ "cryptosuite": "bbs-2023",
+ "created": "2023-08-15T23:36:38Z",
+ "verificationMethod": "did:key:zUC7DerdEmfZ8f4pFajXgGwJoMkV1ofMTmEG5UoNvnWiPiLuGKNeqgRpLH2TV4Xe5mJ2cXV76gRN7LFQwapF1VFu6x2yrr5ci1mXqC1WNUrnHnLgvfZfMH7h6xP6qsf9EKRQrPQ#zUC7DerdEmfZ8f4pFajXgGwJoMkV1ofMTmEG5UoNvnWiPiLuGKNeqgRpLH2TV4Xe5mJ2cXV76gRN7LFQwapF1VFu6x2yrr5ci1mXqC1WNUrnHnLgvfZfMH7h6xP6qsf9EKRQrPQ",
+ "proofPurpose": "assertionMethod",
+ "proofValue": "u2V0DhNhAWQHAspxxmrqBA8cTxfrLqbaQkwrUWIFmRa3BpTslEBC8OxKNclgCOfZv9OlznihCV5TogbVzf7OrzgKyZV1Ps7q-vVFWhc51Z-q1vQE2DoExFQV2NXUJ2zCSlFadgi1W4cWBQgqK8pt8eYTVD9XHmgbWSiWG2ook6Tw3QtCfLA4k1_5IkZJ8f_5AjVY6ZPWGc3hnofAg90L8bqodN-2kJsnHVWbei-VIIvaXSfxGLIbKqvT59z7hsIcm83hDLjgjIqPMDofVsj_DY2S8XJTPuKMFvm-RK9cVLnpI1NQVccZT1Y5f6oqCOOBa6pEOW2LJ0VuNUnwNWfYZ-6tqh5mxzh2hPGUWwj7vwDskdnKHjDSUmUPgL0s5kROSdsiaAMTuZLvOVwIBrDUC-0dp5rhpkZMgrZ8xId_u7NspFM_H1KOGthU_VLGLQUh0Lse2bIHP8LHeiNLSmfNfL_gX-0Iv4Lv2W1zX3rk5oQzFJPCO_0bzG1Yxr70FUdmBbjL7Lku3IUznYTYFfBKY4qFhtewygPBTATCrlgBCbH5SHRuJOFCug89PIRmHyT86QcFrDLrCnl3PiOtliSUY9kPVwqzUiIBF1KYAAgEEAgcDBgQFBQCNAAECBQYHCAkKCw0OD4YDBAgLDA0"
+ }
+}
\ No newline at end of file
diff --git a/TestVectors/derivedUnsignedReveal.json b/TestVectors/derivedUnsignedReveal.json
new file mode 100644
index 0000000..25a824b
--- /dev/null
+++ b/TestVectors/derivedUnsignedReveal.json
@@ -0,0 +1,38 @@
+{
+ "@context": [
+ "https://www.w3.org/ns/credentials/v2",
+ {
+ "@vocab": "https://windsurf.grotto-networking.com/selective#"
+ }
+ ],
+ "type": [
+ "VerifiableCredential"
+ ],
+ "credentialSubject": {
+ "sailNumber": "Earth101",
+ "sails": [
+ {
+ "size": 6.1,
+ "sailName": "Lahaina",
+ "year": 2023
+ },
+ {
+ "size": 7,
+ "sailName": "Lahaina",
+ "year": 2020
+ }
+ ],
+ "boards": [
+ {
+ "year": 2022,
+ "boardName": "CompFoil170",
+ "brand": "Wailea"
+ },
+ {
+ "boardName": "Kanaha Custom",
+ "brand": "Wailea",
+ "year": 2019
+ }
+ ]
+ }
+}
\ No newline at end of file
diff --git a/TestVectors/windDoc.json b/TestVectors/windDoc.json
new file mode 100644
index 0000000..181965a
--- /dev/null
+++ b/TestVectors/windDoc.json
@@ -0,0 +1,48 @@
+{
+ "@context": [
+ "https://www.w3.org/ns/credentials/v2",
+ {
+ "@vocab": "https://windsurf.grotto-networking.com/selective#"
+ }
+ ],
+ "type": [
+ "VerifiableCredential"
+ ],
+ "credentialSubject": {
+ "sailNumber": "Earth101",
+ "sails": [
+ {
+ "size": 5.5,
+ "sailName": "Kihei",
+ "year": 2023
+ },
+ {
+ "size": 6.1,
+ "sailName": "Lahaina",
+ "year": 2023
+ },
+ {
+ "size": 7.0,
+ "sailName": "Lahaina",
+ "year": 2020
+ },
+ {
+ "size": 7.8,
+ "sailName": "Lahaina",
+ "year": 2023
+ }
+ ],
+ "boards": [
+ {
+ "boardName": "CompFoil170",
+ "brand": "Wailea",
+ "year": 2022
+ },
+ {
+ "boardName": "Kanaha Custom",
+ "brand": "Wailea",
+ "year": 2019
+ }
+ ]
+ }
+}
\ No newline at end of file
diff --git a/TestVectors/windMandatory.json b/TestVectors/windMandatory.json
new file mode 100644
index 0000000..2c5d0bc
--- /dev/null
+++ b/TestVectors/windMandatory.json
@@ -0,0 +1 @@
+["/credentialSubject/sailNumber", "/credentialSubject/sails/1", "/credentialSubject/boards/0/year", "/credentialSubject/sails/2"]
diff --git a/TestVectors/windSelective.json b/TestVectors/windSelective.json
new file mode 100644
index 0000000..5166418
--- /dev/null
+++ b/TestVectors/windSelective.json
@@ -0,0 +1 @@
+["/credentialSubject/boards/0", "/credentialSubject/boards/1"]
\ No newline at end of file
diff --git a/index.html b/index.html
index 2307190..544072e 100644
--- a/index.html
+++ b/index.html
@@ -110,6 +110,13 @@
status: "Working Draft",
publisher: "W3C Verifiable Credentials Working Group"
},
+ "DI-ECDSA": {
+ title: "The Elliptic Curve Digital Signature Algorithm Cryptosuites v1.0",
+ href: "https://www.w3.org/TR/vc-di-ecdsa/",
+ authors: ["David Longley", "Manu Sporny", "Marty Reed"],
+ status: "WD",
+ publisher: "W3C Verifiable Credentials Working Group"
+ },
"JSON-LD-FRAMING": {
title: "JSON-LD 1.1 Framing",
href: "https://www.w3.org/TR/json-ld11-framing",
@@ -190,10 +197,10 @@
-This specification describes the BBS+ Signature Suite created in 2023
-for the Data Integrity specification. The Signature Suite utilizes
-BBS+ signatures to provide the capability of zero knowledge
-proof disclosures.
+This specification describes a Data Integrity Cryptosuite for use when generating
+digital signatures using the BBS signature scheme.
+The Signature Suite utilizes BBS signatures to provide selective disclosure and
+unlinkable derived proofs.
@@ -207,154 +214,84 @@
Introduction
-This specification defines a set of cryptographic suites for the purpose of creating, verifying and deriving proofs
-for BBS+ Signatures in conformance with the Data Integrity [[VC-DATA-INTEGRITY]] specification.
+This specification defines a cryptographic suite for the purpose of
+creating, verifying, and deriving proofs using the BBS Signature Scheme in
+conformance with the Data Integrity [[VC-DATA-INTEGRITY]] specification. The
+BBS signature scheme directly provides for selective disclosure and unlinkable
+proofs. It provides four high-level functions that work within the issuer,
+holder, verifier model. Specifically, an issuer uses the BBS `Sign` function to
+create a cryptographic value known as a "BBS signature" which is used in signing
+the original credential. A holder, on receipt of
+a credential signed with BBS, then verifies the credential with the BBS `Verify`
+function.
-In general the suites uses the RDF Dataset Normalization Algorithm [[RDF-DATASET-NORMALIZATION]] to transform an
-input document into its canonical form. It then uses the statement digest algorithm
-to digest each statement to be signed individually, finally the digested statements are signed
-using the defined signature algorithm.
+The holder then chooses information to selectively disclose from the
+received credential and uses the BBS `ProofGen` function to generate a
+cryptographic value, known as a "BBS proof", which is used in creating a proof
+for this "derived credential". The cryptographic "BBS proof" value is not linkable
+to the original "BBS signature" and a different, unlinkable "BBS proof" can be
+generated by the holder for additional "derived credentials", including any
+containing the exact same information.
+Finally, a verifier uses the BBS `ProofVerify` function to verify the derived
+credential received from the holder.
-BBS+ signatures [[CFRG-BBS-SIGNATURE]] are compatible with any pairing friendly elliptic curve, however the cryptographic
-suites defined in this document elect to only allow the usage of the BLS12-381 for interoperability purposes.
+Applying the BBS signature scheme to verifiable credentials involves the
+processing specified in this document.
+In general, the suite uses the RDF Dataset Normalization Algorithm
+[[RDF-DATASET-NORMALIZATION]] to transform an input document into its canonical
+form. An issuer then uses selective disclosure primitives to separate the
+canonical form into mandatory and non-mandatory statements. These are processed
+separately with other information to serve as the inputs to the BBS `Sign`
+function along with appropriate key material. This output is used to
+generate a secured credential. A holder uses a set of selective disclosure
+functions and the BBS `Verify` function on receipt of the credential
+to ascertain validity.
-
-
- Terminology
-The following terms are used to describe concepts involved in the
-generation and verification of the Data Integrity
-signature suite.
+Similarly, on receipt of a BBS signed credential, a holder uses the RDF Dataset
+Normalization Algorithm [[RDF-DATASET-NORMALIZATION]] to transform an input
+document into its canonical form, and then applies selective disclosure
+primitives to separate the canonical form into mandatory and selectively
+disclosed statements, which are appropriately processed and serve as inputs to
+the BBS `ProofGen` function. Suitably processed, the output of this function
+becomes the signed selectively disclosed credential sent to a verifier. Using
+canonicalization and selective disclosure primitives, the verifier can then use
+the BBS `ProofVerify` function to validate the credential.
-
-
-
- - signature suite
- -
-A specified set of cryptographic primitives typically consisting of
-a canonicalization algorithm, a message digest algorithm, and a signature
-algorithm that are bundled together by cryptographers for developers
-for the purposes of safety and convenience.
-
- - canonicalization algorithm
- -
-An algorithm that takes an input document that has more than one possible
-representation and always transforms it into a canonical form. This process is
-sometimes also called normalization.
-
- - canonical form
- -
-The output of applying a canonicalization algorithm to an input document.
-
- - statement
- -
-n-quads statements are a sequence of RDF terms representing the subject, predicate, object and graph label.
-See the grammar definition here.
-
- - statement digest algorithm
- -
-An algorithm that takes a statement and produces a cryptographic
-output message that is often many orders of magnitude smaller than the
-input message. These algorithms are often 1) very fast, 2)
-non-reversible, 3) cause the output to change significantly when even one
-bit of the input message changes, and 4) make it infeasible to find two
-different inputs for the same output.
-
- - statement digest
- -
-The result of the application of the statement digest algorithm to a statement
-
- - signature algorithm
- -
-An algorithm that takes an input message and produces an output value where the
-receiver of the message can mathematically verify that the message has not
-been modified in transit and came from someone possessing a particular secret.
-
- - selective disclosure
- -
-An information disclosure technique which is the process of deciding and disclosing
-a sub-set of information from an original information set.
-
- - data integrity proof document
- -
-A linked data document featuring one or more data integrity proofs.
-
- - revealed statements
- -
-The set of statements produced by applying the canonicalization algorithm to the reveal document.
-
- - derive proof algorithm
- -
-An algorithm that takes in a data integrity proof document featuring a data integrity proof that supports a
-derive proof algorithm along side a reveal document
-and derives a proof only revealing the statements defined in the reveal document.
-
- - derived proof
- -
-The product of apply the derive proof algorithm to an data integrity proof document and reveal document.
-
- - quad
- -
-A quad as specified by [[RDF-DATASET-NORMALIZATION]]
-
- - n-quad
- -
-An n-quad which is a line based, plain text format encoding of a quad as defined by [[RDF-N-Quads]].
-
- - linked data document
- -
-A document comprised of linked data.
-
- - curve name
- -
-The name defining a particular cryptographic curve.
-
- - frame
- -
-A frame as specified by [[JSON-LD-FRAMING]]
-is a JSON-LD document, which describes the form for transforming another JSON-LD
-document using matching and embedding rules. A frame document allows additional
-keywords and certain map entries to describe the matching and transforming process.
-
- - JSON-LD document
- -
-A JSON-LD document as specified by [[JSON-LD-FRAMING]]
-is a is a serialization of an RDF dataset
-
- - framing algorithm
- -
-A Framing Algorithm as specified by [[JSON-LD-FRAMING]]
-is an algorithm that accomplishes the process of framing an input document to a given frame.
-
- - blank node
- -
-A blank node as specified by [[RDF-CONCEPTS]].
-In short, it is a node in a graph that is neither an IRI, nor a literal.
-
- - reveal document
- -
- A JSON-LD document in the form of a frame which describes the desired transform to apply to the input proof document using
- the framing algorithm defined in [[JSON-LD-FRAMING]].
-
-
- - revealed document
- -
- A data integrity proof document which is the product of the derive proof algorithm.
-
- - input proof document
- -
- A data integrity proof document featuring a data integrity proof that supports proof derivation.
-
-
-
+
+
-
@@ -369,132 +306,51 @@ Data Model
Verification Methods
-The cryptographic material used to verify a data integrity proof is
-called the verification method. This suite relies on public key material
-represented using [[MULTIBASE]], [[MULTICODEC]], JSON Web Key [[RFC7517]], and [[BLS-JOSE-COSE]].
-
-
-
-This suite MAY be used to verify Data Integrity Proofs [[VC-DATA-INTEGRITY]]
-produced by BLS12-381 public key material encoded as a
-JsonWebKey. Loss-less key transformation processes that
-result in equivalent cryptographic material MAY be utilized.
+These verification methods are used to verify Data Integrity Proofs
+[[VC-DATA-INTEGRITY]] produced using BLS12-381 cryptographic key material
+that is compliant with [[CFRG-BBS-SIGNATURE]]. The encoding formats for these key types
+are provided in this section. Lossless cryptographic key transformation
+processes that result in equivalent cryptographic key material MAY be used
+during the processing of digital signatures.
-
-
- JsonWebKey
-
-
-This definition should go in the Data Integrity specification and referenced
-from there.
-
-
-
-The `type` of the verification method MUST be `JsonWebKey`.
-
-
-
-The `controller` of the verification method MUST be a URL.
-
-
-
-The `publicKeyJwk` property of the verification method MUST be a public
-key encoded according to [[RFC7517]].
-
-
-
-The specific encoding of public keys is still being refined in [[BLS-JOSE-COSE]].
-
-
-
-Developers are advised to not accidentally publish a representation of a private
-key. Implementations of this specification MUST raise errors if expression
-of public key information includes a key parameter that is marked as
-`Private` in the IANA JSON Web Key Parameters registry.
-
-
-
-{
- "@context": [
- "https://www.w3.org/ns/did/v1",
- "https://w3id.org/security/data-integrity/v1"
- ],
- "id": "https://example.com/issuer/123",
- "verificationMethod": [{
- "id": "https://example.com/issuer/123#key-0",
- "type": "JsonWebKey",
- "controller": "https://example.com/issuer/123",
- "publicKeyJwk": {
- "kty": "OKP",
- "crv": "Bls12381G1",
- "x": "Ed4GBGLVasEp4ejPz44CvllbTldfLLcm2QcIJluBL6p_SQmRrZvJNa3YaJ-Wx8Im",
- "y": "AbdYAsAb20CHzlVW6VBO9i16BcGOmcYiMLlBEh9DfAiDu_1ZIAd1zewSi9f6517g"
- }
- }, {
- "id": "https://example.com/issuer/123#key-1",
- "type": "JsonWebKey",
- "controller": "https://example.com/issuer/123",
- "publicKeyJwk": {
- "kty": "OKP",
- "crv": "Bls12381G2",
- "x": "Ajs8lstTgoTgXMF6QXdyh3m8k2ixxURGYLMaYylVK_x0F8HhE8zk0YWiGV3CHwpQ
- Ea2sH4PBZLaYCn8se-1clmCORDsKxbbw3Js_Alu4OmkV9gmbJsy1YF2rt7Vxzs6S",
- "y": "BVkkrVEib-P_FMPHNtqxJymP3pV-H8fCdvPkoWInpFfM9tViyqD8JAmwDf64zU2h
- BV_vvCQ632ScAooEExXuz1IeQH9D2o-uY_dAjZ37YHuRMEyzh8Tq-90JHQvicOqx"
- }
- }]
-}
-
-
-
Multikey
-
-
-This definition should go in the Data Integrity specification and referenced
-from there.
-
-
-The `type` of the verification method MUST be `Multikey`.
+The Multikey format, as defined in
+[[VC-DATA-INTEGRITY]], is used to express public keys for the cryptographic
+suites defined in this specification.
-The `controller` of the verification method MUST be a URL.
-
-
-
-The `publicKeyMultibase` property of the verification method MUST be a public
-key encoded according to [[MULTICODEC]] and formatted according to
-[[MULTIBASE]]. The multicodec encoding of a BLS12-381 public key that combines
-both the G1 and G2 fields is the byte prefix `0xee` followed by the 48-byte G1
-public key data, which is then followed by the 96-byte G2 public key data. The
-145 byte value is then encoded using base64url with no padding (`u`) as the
-prefix. Any other encodings MUST NOT be used.
+The `publicKeyMultibase` property represents a Multibase-encoded Multikey
+expression of a BLS12-381 public key in the G2 group. The encoding of this field
+is the two-byte prefix `0xeb01` followed
+by the 96-byte compressed public key data.
+The 98-byte value is then encoded using base58-btc, indicated by the `z` prefix. Any
+other encoding MUST NOT be used.
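+
+The following is a non-normative TypeScript sketch of the Multikey encoding
+described above. The npm `bs58` package is assumed for base58-btc encoding, and
+the caller is assumed to already hold a 96-byte compressed G2 public key.
+
+import bs58 from 'bs58';
+
+// Prepend the two-byte Multikey prefix 0xeb01 to a compressed BLS12-381 G2
+// public key, then base58-btc encode the result with a "z" multibase prefix.
+function encodeG2PublicKeyMultibase(publicKey: Uint8Array): string {
+  if (publicKey.length !== 96) {
+    throw new Error('Expected a 96-byte compressed G2 public key.');
+  }
+  const prefixed = new Uint8Array(2 + publicKey.length);
+  prefixed.set([0xeb, 0x01]);
+  prefixed.set(publicKey, 2);
+  return 'z' + bs58.encode(prefixed);
+}
+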
Developers are advised to not accidentally publish a representation of a private
key. Implementations of this specification will raise errors in the event of a
-[[MULTICODEC]] value other than `0xee` being used in a `publicKeyMultibase`
-value.
+[[?MULTICODEC]] value other than `0xeb01` being used in a
+`publicKeyMultibase` value.
+ title="A BLS12-381 G2 group public key, encoded as a Multikey">
{
"id": "https://example.com/issuer/123#key-0",
"type": "Multikey",
"controller": "https://example.com/issuer/123",
- "publicKeyMultibase": "u7ljnAxKdp7YVqJvcMU9GtnmrMc1XZztXHsTsZ2LsmGJ67SsdbmNc
- S2SDs0daEPfhVXgODk0IVrgguJ-TJACHyXYa9Ae8DaxcvRy89KLgmWsyOOJn2oY7vCE2gt
- JoebMJiQsdbmNcS2SDs0daEPfhVXgODk0IVrgguJ-TJACHyXYa9Ae8DaxcvRy89KLgm"
+ "publicKeyMultibase": "zUC7EK3ZakmukHhuncwkbySmomv3FmrkmS36E4Ks5rsb6VQSRpoCrx6
+ Hb8e2Nk6UvJFSdyw9NK1scFXJp21gNNYFjVWNgaqyGnkyhtagagCpQb5B7tagJu3HDbjQ8h
+ 5ypoHjwBb"
}
-
+
{
"@context": [
"https://www.w3.org/ns/did/v1",
@@ -505,511 +361,1145 @@ Multikey
"id": "https://example.com/issuer/123#key-1",
"type": "Multikey",
"controller": "https://example.com/issuer/123",
- "publicKeyMultibase": "u7ljnAxKdp7YVqJvcMU9GtnmrMc1XZztXHsTsZ2LsmGJ67SsdbmNc
- S2SDs0daEPfhVXgODk0IVrgguJ-TJACHyXYa9Ae8DaxcvRy89KLgmWsyOOJn2oY7vCE2gt
- JoebMJiQsdbmNcS2SDs0daEPfhVXgODk0IVrgguJ-TJACHyXYa9Ae8DaxcvRy89KLgm"
+ "publicKeyMultibase": "zUC7EK3ZakmukHhuncwkbySmomv3FmrkmS36E4Ks5rsb6VQSRpoCr
+ x6Hb8e2Nk6UvJFSdyw9NK1scFXJp21gNNYFjVWNgaqyGnkyhtagagCpQb5B7tagJu3HDbjQ8h
+ 5ypoHjwBb"
}]
}
-
- Data Integrity Proof
- bbs-signature-2023
-
- This suite relies on detached digital signatures represented using [[MULTIBASE]].
-
+ Proof Representations
+This suite relies on detached digital signatures represented using [[MULTIBASE]]
+and [[?MULTICODEC]].
+
+
+
+ DataIntegrityProof
+
+
The `verificationMethod` property of the proof MUST be a URL.
Dereferencing the `verificationMethod` MUST result in an object
containing a `type` property with the value set to
-`Multikey` or `JsonWebKey`.
-
+`Multikey`.
+
-
+
The `type` property of the proof MUST be `DataIntegrityProof`.
-
-
-The `cryptosuite` property of the proof MUST be `bbs-signature-2023`.
-
-
+
+
+The `cryptosuite` property of the proof MUST be `bbs-2023`.
+
+
The `created` property of the proof MUST be an [[XMLSCHEMA11-2]]
-formated date string.
-
-
+formatted date string.
+
+
The `proofPurpose` property of the proof MUST be a string, and MUST
match the verification relationship expressed by the verification method
`controller`.
-
-
-The `proofValue` property of the proof MUST be a detached BBS Signature
-produced according to Sign, encoded according to
-[[MULTIBASE]] using the base64 base encoding with no padding.
-
+
+
+The value of the `proofValue` property of the proof MUST be a BBS signature or
+BBS proof produced according to [[CFRG-BBS-SIGNATURE]] then serialized and encoded
+according to procedures in section .
+
+
+
-
-{
- "@context": [
- {"title": "https://schema.org/title"},
- "https://w3id.org/security/data-integrity/v1"
- ],
- "title": "Hello world!",
- "proof": {
- "type": "DataIntegrityProof",
- "cryptosuite": "bbs-signature-2023",
- "created": "2020-11-05T19:23:24Z",
- "verificationMethod": "https://example.com/issuer/123#key-2",
- "proofPurpose": "assertionMethod",
- "proofValue": "uU6i3dTz5yFfWJ8zgsamuyZa4yAHPm75tUOOXddR6krCvCYk77sbCOuEVcdB
- Dd/l6tIYkTTbA3pmDa6Qia/JkOnIXDLmoBz3vsi7L5t3DWySI/VLmBqleJ/Tbus5RoyiDERDB
- 5rnACXlnOqJ/U8yFQFtcp/mBCc2FtKNPHae9jKIv1dm9K9QK1F3GI1AwyGoUfjLWrkGDObO1o
- AhpEd0+et+qiOf2j8p3MTTtRRx4Hgjcl0jXCq7C7R5/nLpgimHAAAAdAx4ouhMk7v9dXijCIM
- 0deicn6fLoq3GcNHuH5X1j22LU/hDu7vvPnk/6JLkZ1xQAAAAIPd1tu598L/K3NSy0zOy6oba
- Enaqc1R5Ih/6ZZgfEln2a6tuUp4wePExI1DGHqwj3j2lKg31a/6bSs7SMecHBQdgIYHnBmCYG
- nu/LZ9TFV56tBXY6YOWZgFzgLDrApnrFpixEACM9rwrJ5ORtxAAAAAgE4gUIIC9aHyJNa5TBk
- Oh6ojlvQkMVLXa/vEl+3NCLXblxjgpM7UEMqBkE9/aGQcoD3Tgmy+z0hN+4elMky1RnJEhCuN
- QNsEg"
- }
-}
-
+
+
+
+ Algorithms
+
+
+The following algorithms describe how to use verifiable credentials with
+the BBS Signature Scheme [[CFRG-BBS-SIGNATURE]]. When using the BBS signature
+scheme, the SHAKE-256 variant SHOULD be used.
+
+
+
+Implementations SHOULD fetch and cache verification method information as
+early as possible when adding or verifying proofs. Parameters passed to
+functions in this section use information from the verification
+method — such as the public key size — to determine function parameters — such
+as the cryptographic hashing algorithm.
+
+
+
+When the RDF Dataset Canonicalization Algorithm [[RDF-CANON]] is used,
+implementations of that algorithm will detect
+dataset poisoning
+by default, and abort processing upon detection.
+
-
- bbs-proof-2023
-
- This suite relies on detached digital signatures represented using [[MULTIBASE]].
-
+ Selective Disclosure Functions
-
-The `verificationMethod` property of the proof MUST be a URL.
-Dereferencing the `verificationMethod` MUST result in an object
-containing a `type` property with the value set to
-`Multikey` or `JsonWebKey`.
-
+
+ createShuffledIdLabelMapFunction
+
+The following algorithm creates a label map factory function that uses an
+HMAC to shuffle canonical blank node identifiers. The required input is an HMAC
+(previously initialized with a secret key), HMAC. A function,
+labelMapFactoryFunction, is produced as output.
+
-
-The `type` property of the proof MUST be `DataIntegrityProof`.
-
-
-The `cryptosuite` property of the proof MUST be `bbs-proof-2023`.
-
-
-The `created` property of the proof MUST be an [[XMLSCHEMA11-2]]
-formated date string.
-
-
-The `proofPurpose` property of the proof MUST be a string, and MUST
-match the verification relationship expressed by the verification method
-`controller`.
-
-
-The `proofValue` property of the proof MUST be a detached BBS Signature
-produced according to ProofGen,
-encoded according to [[MULTIBASE]] using the base64 base encoding with no padding.
-
+
+ -
+Create a function, labelMapFactoryFunction, with one required input
+(a canonical node identifier map, canonicalIdMap), that will
+return a blank node identifier map, bnodeIdMap, as output. Set the
+function's implementation to:
+
+ -
+Generate a new empty bnode identifier map, bnodeIdMap.
+
+ -
+For each map entry, entry, in canonicalIdMap:
+
+ -
+Perform an HMAC operation on the canonical identifier from the value in entry to get an HMAC
+digest, digest.
+
+ -
+Generate a new string value, b64urlDigest, and initialize it to "u"
+followed by appending a base64url-no-pad encoded version of the digest
+value.
+
+ -
+Add a new entry, newEntry, to bnodeIdMap using the key
+from entry and b64urlDigest as the value.
+
+
+
+ -
+Derive the shuffled mapping from the `bnodeIdMap` as follows:
+
+ -
+Set `hmacIds` to be the sorted array of values from the `bnodeIdMap`, and set
+`bnodeKeys` to be the ordered array of keys from the `bnodeIdMap`.
+
+ -
+For each key in `bnodeKeys`, replace the `bnodeIdMap` value for that key with the
+index position of the value in the `hmacIds` array prefixed by "b", i.e.,
+`bnodeIdMap.set(bkey, 'b' + hmacIds.indexOf(bnodeIdMap.get(bkey)))`.
+
+
+
+ -
+Return bnodeIdMap.
+
+
+
+ -
+Return labelMapFactoryFunction.
+
+
+
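+The following is a non-normative TypeScript sketch of this algorithm. HMAC-SHA-256
+from node:crypto is used here purely for illustration; this suite specifies
+SHAKE-256 as the HMAC hash.
+
+import { createHmac } from 'node:crypto';
+
+function createShuffledIdLabelMapFunction(hmacKey: Uint8Array) {
+  return function labelMapFactoryFunction(canonicalIdMap: Map<string, string>) {
+    const bnodeIdMap = new Map<string, string>();
+    // HMAC each canonical identifier and multibase-encode the digest.
+    for (const [key, canonicalId] of canonicalIdMap) {
+      const digest = createHmac('sha256', hmacKey).update(canonicalId).digest();
+      bnodeIdMap.set(key, 'u' + digest.toString('base64url'));
+    }
+    // Replace each HMAC value with "b" + its position in the sorted value list.
+    const hmacIds = [...bnodeIdMap.values()].sort();
+    for (const [key, value] of bnodeIdMap) {
+      bnodeIdMap.set(key, 'b' + hmacIds.indexOf(value));
+    }
+    return bnodeIdMap;
+  };
+}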
+
-
-{
- "@context": [
- {"title": "https://schema.org/title"},
- "https://w3id.org/security/data-integrity/v1"
- ],
- "title": "Hello world!",
- "proof": {
- "type": "DataIntegrityProof",
- "cryptosuite": "bbs-proof-2023",
- "created": "2020-11-05T19:23:24Z",
- "verificationMethod": "https://example.com/issuer/123#key-2",
- "proofPurpose": "assertionMethod",
- "generators": 3,
- "disclosed": [ 2 ],
- "proofValue": "uU6i3dTz5yFfWJ8zgsamuyZa4yAHPm75tUOOXddR6krCvCYk77sbCOuEVcdB
- Dd/l6tIYkTTbA3pmDa6Qia/JkOnIXDLmoBz3vsi7L5t3DWySI/VLmBqleJ/Tbus5RoyiDERDB
- 5rnACXlnOqJ/U8yFQFtcp/mBCc2FtKNPHae9jKIv1dm9K9QK1F3GI1AwyGoUfjLWrkGDObO1o
- AhpEd0+et+qiOf2j8p3MTTtRRx4Hgjcl0jXCq7C7R5/nLpgimHAAAAdAx4ouhMk7v9dXijCIM
- 0deicn6fLoq3GcNHuH5X1j22LU/hDu7vvPnk/6JLkZ1xQAAAAIPd1tu598L/K3NSy0zOy6oba
- Enaqc1R5Ih/6ZZgfEln2a6tuUp4wePExI1DGHqwj3j2lKg31a/6bSs7SMecHBQdgIYHnBmCYG
- nu/LZ9TFV56tBXY6YOWZgFzgLDrApnrFpixEACM9rwrJ5ORtxAAAAAgE4gUIIC9aHyJNa5TBk
- Oh6ojlvQkMVLXa/vEl+3NCLXblxjgpM7UEMqBkE9/aGQcoD3Tgmy+z0hN+4elMky1RnJEhCuN
- QNsEg"
- }
-}
-
-
-
-
-
- Algorithms
+
+ bbs-2023 Functions
+
+ serializeBaseProofValue
+
+The following algorithm serializes the base proof value, including the
+BBS signature, HMAC key, and mandatory pointers.
+The required inputs are a base signature bbsSignature,
+an HMAC key hmacKey, and an array of
+mandatoryPointers.
+A single base proof string value is produced as output.
+
-
-The following section describes multiple Data Integrity cryptographic suites
-that utilize the BBS Signature Algorithm [[CFRG-BBS-SIGNATURE]].
-
+
+ -
+Initialize a byte array, `proofValue`, that starts with the BBS base proof
+header bytes `0xd9`, `0x5d`, and `0x02`.
+
+ -
+Initialize `components` to an array with three elements containing the values of:
+`bbsSignature`, `hmacKey`, and `mandatoryPointers`.
+
+ -
+CBOR-encode `components` and append it to `proofValue`.
+
+ -
+Initialize `baseProof` to a string with the multibase-base64url-no-pad-encoding
+of `proofValue`. That is, return a string starting with "`u`" and ending with the
+base64url-no-pad-encoded value of `proofValue`.
+
+ -
+Return `baseProof` as base proof.
+
+
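+The following is a non-normative TypeScript sketch of base proof serialization;
+the npm `cbor` package is assumed for CBOR encoding.
+
+import * as cbor from 'cbor';
+
+function serializeBaseProofValue(bbsSignature: Uint8Array, hmacKey: Uint8Array,
+    mandatoryPointers: string[]): string {
+  // BBS base proof header bytes, followed by the CBOR-encoded components.
+  const header = Buffer.from([0xd9, 0x5d, 0x02]);
+  const components = cbor.encode(
+    [Buffer.from(bbsSignature), Buffer.from(hmacKey), mandatoryPointers]);
+  // Multibase base64url-no-pad encoding, indicated by the leading "u".
+  return 'u' + Buffer.concat([header, components]).toString('base64url');
+}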
-
+
+
+ parseBaseProofValue
+
+The following algorithm parses the components of a `bbs-2023` selective
+disclosure base proof value. The required input is a proof value
+(proofValue). A single object, parsed base proof, containing
+three elements, using the names "bbsSignature", "hmacKey",
+and "mandatoryPointers", is produced as output.
+
+
+
+ -
+Ensure the `proofValue` string starts with `u`, indicating that it is a
+multibase-base64url-no-pad-encoded value, and throw an error if it does not.
+
+ -
+Initialize `decodedProofValue` to the result of base64url-no-pad-decoding the
+substring following the leading `u` in `proofValue`.
+
+ -
+Ensure that the `decodedProofValue` starts with the BBS base proof header
+bytes `0xd9`, `0x5d`, and `0x02`, and throw an error if it does not.
+
+ -
+Initialize `components` to an array that is the result of CBOR-decoding the
+bytes that follow the three-byte BBS base proof header. Ensure the result
+is an array of three elements.
+
+ -
+Return an object with properties set to the three elements, using the names
+"bbsSignature", "hmacKey", and "mandatoryPointers",
+respectively.
+
+
+
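+The following is a non-normative TypeScript sketch of base proof parsing; the
+npm `cbor` package is assumed for CBOR decoding.
+
+import * as cbor from 'cbor';
+
+function parseBaseProofValue(proofValue: string) {
+  if (!proofValue.startsWith('u')) {
+    throw new Error('Expected a multibase-base64url-no-pad proof value.');
+  }
+  const decoded = Buffer.from(proofValue.slice(1), 'base64url');
+  if (decoded[0] !== 0xd9 || decoded[1] !== 0x5d || decoded[2] !== 0x02) {
+    throw new Error('Expected the bbs-2023 base proof header 0xd9 0x5d 0x02.');
+  }
+  const components = cbor.decodeFirstSync(decoded.subarray(3));
+  if (!Array.isArray(components) || components.length !== 3) {
+    throw new Error('Expected three base proof components.');
+  }
+  const [bbsSignature, hmacKey, mandatoryPointers] = components;
+  return { bbsSignature, hmacKey, mandatoryPointers };
+}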
+
+
+
+ createDisclosureData
+
+
+The following algorithm creates data to be used to generate a derived proof. The
+inputs include a JSON-LD document (document), a BBS base proof
+(proof), an array of JSON pointers to use to selectively disclose
+statements (selectivePointers), and any custom JSON-LD API options
+(such as a document loader). A single object, disclosure data, is
+produced as output, which contains the "bbsProof", "labelMap",
+"mandatoryIndexes", "selectiveIndexes", and "revealDocument" fields.
+
+
+
+ -
+Initialize `bbsSignature`, `hmacKey`, and
+`mandatoryPointers` to the values of the associated properties in the object
+returned when calling the algorithm in Section
+, passing the `proofValue` from `proof`.
+
+ -
+Initialize `hmac` to an HMAC API using `hmacKey`. The HMAC uses the same hash
+algorithm used in the signature algorithm, i.e., SHAKE-256.
+
+ -
+Initialize `labelMapFactoryFunction` to the result of calling the
+`createShuffledIdLabelMapFunction` algorithm passing `hmac` as `HMAC`.
+
+ -
+Initialize `combinedPointers` to the concatenation of `mandatoryPointers`
+and `selectivePointers`.
+
+ -
+Initialize `groupDefinitions` to a map with the following entries: key of
+the string `"mandatory"` and value of `mandatoryPointers`; key of the string
+`"selective"` and value of `selectivePointers`; and key of the string `"combined"`
+and value of `combinedPointers`.
+
+ -
+Initialize `groups` and `labelMap` to the result of calling the algorithm in
+Section 3.3.16
+canonicalizeAndGroup of the [[DI-ECDSA]] specification, passing `document`,
+`labelMapFactoryFunction`,
+`groupDefinitions`, and any custom JSON-LD
+API options. Note: This step transforms the document into an array of canonical
+N-Quads whose order has been shuffled based on HMAC-applied blank node
+identifiers, and groups
+the N-Quad strings according to selections based on JSON pointers.
+
+
+ -
+Compute the mandatory indexes relative to their positions in the combined
+statement list, i.e., find the position at which a mandatory statement occurs
+in the list of combined statements. One method for doing this is given below; a
+non-normative sketch also follows this algorithm.
+
+ -
+Initialize `mandatoryIndexes` to an empty array. Set `mandatoryMatch` to
+`groups.mandatory.matching` map; set `combinedMatch` to
+`groups.combined.matching`; and set `combinedIndexes` to the ordered array of
+just the keys of the `combinedMatch` map.
+
+ -
+For each key in the `mandatoryMatch` map, find its index in the `combinedIndexes`
+array (e.g., `combinedIndexes.indexOf(key)`), and add this value to the
+`mandatoryIndexes` array.
+
+
+
+ -
+Compute the selective indexes relative to their positions in the non-mandatory
+statement list, i.e., find the position at which a selected statement occurs in
+the list of non-mandatory statements. One method for doing this is given below.
+
+ -
+Initialize `selectiveIndexes` to an empty array. Set `selectiveMatch` to the
+`groups.selective.matching` map; set `mandatoryNonMatch` to the map
+`groups.mandatory.nonMatching`; and set `nonMandatoryIndexes` to the ordered array of
+just the keys of the `mandatoryNonMatch` map.
+
+ -
+For each key in the `selectiveMatch` map, find its index in the `nonMandatoryIndexes`
+array (e.g., `nonMandatoryIndexes.indexOf(key)`), and add this value to the
+`selectiveIndexes` array.
+
+
+
+
+ -
+Initialize `bbsMessages` to an array of byte arrays obtained from the
+UTF-8 encoding of the values in the `nonMandatory` array.
+
+ -
+Recompute the `bbsHeader` using the following steps:
+
+
+ -
+Initialize `proofHash` to the result of calling the RDF Dataset Canonicalization
+algorithm [[RDF-CANON]] on `proof` with the `proofValue` removed and then
+cryptographically
+hashing the result using the same hash that is used by the signature algorithm,
+i.e., SHAKE-256. Note: This step can be performed in parallel;
+it only needs to be completed before this algorithm terminates, as the result is
+part of the return value.
+
+ -
+Initialize `mandatoryHash` to the result of calling the algorithm in
+Section 3.3.17
+hashMandatoryNQuads of the [[DI-ECDSA]] specification, passing the values
+from the map
+groups.mandatory.matching and utilizing the SHAKE-256 algorithm.
+
+ -
+Set `bbsHeader` to the concatenation of `proofHash` and `mandatoryHash` in that
+order.
+
+
+
+
+ -
+Set `bbsProof` to the value computed by the `ProofGen` procedure from
+[[CFRG-BBS-SIGNATURE]], i.e. `ProofGen(PK, signature, header, ph, messages, disclosed_indexes)`,
+where `PK` is the original issuer's public key, `signature` is the
+`bbsSignature`, `header` is the `bbsHeader`, `ph` is an empty byte array,
+`messages` is `bbsMessages`, and `disclosed_indexes` is `selectiveIndexes`.
+
+
+ -
+Initialize revealDocument to the result of the "selectJsonLd"
+algorithm, passing `document` and `combinedPointers` as `pointers`.
+
+ -
+Run the RDF Dataset Canonicalization Algorithm [[RDF-CANON]] on
+the joined combinedGroup.deskolemizedNQuads, passing any custom
+options, and get the canonical bnode identifier map, canonicalIdMap.
+Note: This map includes the canonical blank node identifiers that a verifier
+will produce when they canonicalize the reveal document.
+
+ -
+Initialize verifierLabelMap to an empty map. This map will map
+the canonical blank node identifiers produced by the verifier when they
+canonicalize the revealed document, to the blank node identifiers that were
+originally signed in the base proof.
+
+ -
+For each key (`inputLabel`) and value (`verifierLabel`) in `canonicalIdMap`:
+
+ -
+Add an entry to `verifierLabelMap`, using `verifierLabel` as the key, and the
+value associated with `inputLabel` as a key in `labelMap` as the value.
+
+
+
+ -
+Return an object with properties matching `bbsProof`, `verifierLabelMap` (as `labelMap`),
+`mandatoryIndexes`, `selectiveIndexes`, and `revealDocument`.
+
+
+
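+The following is a non-normative TypeScript sketch of steps 7 and 8 above. The
+input maps are assumed to be the `matching` and `nonMatching` maps produced by
+the canonicalizeAndGroup primitive, keyed by statement index.
+
+function computeAdjustedIndexes(
+    mandatoryMatch: Map<number, string>, combinedMatch: Map<number, string>,
+    selectiveMatch: Map<number, string>, mandatoryNonMatch: Map<number, string>) {
+  const combinedIndexes = [...combinedMatch.keys()];
+  const nonMandatoryIndexes = [...mandatoryNonMatch.keys()];
+  // Mandatory indexes are relative to the combined statement list.
+  const mandatoryIndexes =
+    [...mandatoryMatch.keys()].map(key => combinedIndexes.indexOf(key));
+  // Selective indexes are relative to the non-mandatory statement list.
+  const selectiveIndexes =
+    [...selectiveMatch.keys()].map(key => nonMandatoryIndexes.indexOf(key));
+  return { mandatoryIndexes, selectiveIndexes };
+}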
+
+
+
+ compressLabelMap
+
+The following algorithm compresses a label map. The required input is
+label map (labelMap). The output is a compressed label map.
+
+
+
+ -
+Initialize `map` to an empty map.
+
+ -
+For each entry (`k`, `v`) in `labelMap`:
+
+ -
+Add an entry to `map`, with a key that is a base-10 integer parsed from the
+characters following the "c14n" prefix in `k`, and a value that is a base-10
+integer parsed from the characters following the "b" prefix in `v`.
+
+
+
+ -
+Return `map` as compressed label map.
+
+
+
+
+
+ decompressLabelMap
+
+
+The following algorithm decompresses a label map. The required input is a
+compressed label map (compressedLabelMap). The output is a
+decompressed label map.
+
+
+
+ -
+Initialize `map` to an empty map.
+
+
+ -
+For each entry (`k`, `v`) in `compressedLabelMap`:
+
+ -
+Add an entry to `map`, with a key that adds the prefix "c14n" to `k`, and a value
+that adds a prefix of "b" to `v`.
+
+
+
+ -
+Return `map` as decompressed label map.
+
+
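+The following is a non-normative TypeScript sketch of compressLabelMap and
+decompressLabelMap, which strip or restore the "c14n" and "b" prefixes.
+
+function compressLabelMap(labelMap: Map<string, string>): Map<number, number> {
+  const map = new Map<number, number>();
+  for (const [k, v] of labelMap) {
+    map.set(parseInt(k.slice(4), 10), parseInt(v.slice(1), 10));
+  }
+  return map;
+}
+
+function decompressLabelMap(compressedLabelMap: Map<number, number>): Map<string, string> {
+  const map = new Map<string, string>();
+  for (const [k, v] of compressedLabelMap) {
+    map.set(`c14n${k}`, `b${v}`);
+  }
+  return map;
+}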
+
+
+
+
+ serializeDerivedProofValue
+
+
+The following algorithm serializes a derived proof value. The required inputs
+are a BBS proof (bbsProof), a label map (labelMap), an
+array of mandatory indexes (mandatoryIndexes), and an array of
+selective indexes (selectiveIndexes). A single derived proof
+value, serialized as a byte string, is produced as output.
+
-
-The `bbs-signature-2023` cryptographic suite takes an input document, canonicalizes
+
+ -
+Initialize `compressedLabelMap` to the result of calling the algorithm in
+Section , passing `labelMap` as the parameter.
+
+ -
+Initialize a byte array, `proofValue`, that starts with the BBS disclosure
+proof header bytes `0xd9`, `0x5d`, and `0x03`.
+
+ -
+Initialize `components` to an array with four elements containing the values of
+`bbsProof`, `compressedLabelMap`, `mandatoryIndexes`, and `selectiveIndexes`.
+
+ -
+CBOR-encode `components` and append it to `proofValue`.
+
+ -
+Return the derived proof as a string with the
+multibase-base64url-no-pad-encoding of `proofValue`. That is, return a string
+starting with "`u`" and ending with the base64url-no-pad-encoded value of
+`proofValue`.
+
+
+
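+The following is a non-normative TypeScript sketch of derived proof
+serialization; the npm `cbor` package and the compressLabelMap sketch above are
+assumed.
+
+import * as cbor from 'cbor';
+
+declare function compressLabelMap(labelMap: Map<string, string>): Map<number, number>;
+
+function serializeDerivedProofValue(bbsProof: Uint8Array, labelMap: Map<string, string>,
+    mandatoryIndexes: number[], selectiveIndexes: number[]): string {
+  // BBS disclosure proof header bytes, followed by the CBOR-encoded components.
+  const header = Buffer.from([0xd9, 0x5d, 0x03]);
+  const components = cbor.encode([Buffer.from(bbsProof), compressLabelMap(labelMap),
+    mandatoryIndexes, selectiveIndexes]);
+  return 'u' + Buffer.concat([header, components]).toString('base64url');
+}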
+
+
+
+ parseDerivedProofValue
+
+
+The following algorithm parses the components of the derived proof value.
+The required input is a derived proof value (proofValue).
+A single derived proof value object is produced as output, which
+contains a set of four elements, using the names "bbsProof", "labelMap",
+"mandatoryIndexes", and "selectiveIndexes".
+
+
+
+ -
+Ensure the `proofValue` string starts with `u`, indicating that it is a
+multibase-base64url-no-pad-encoded value, and throw an error if it does not.
+
+ -
+Initialize `decodedProofValue` to the result of base64url-no-pad-decoding the
+substring that follows the leading `u` in `proofValue`.
+
+ -
+Ensure that the `decodedProofValue` starts with the BBS disclosure proof
+header bytes `0xd9`, `0x5d`, and `0x03`, and throw an error if it does not.
+
+
+ -
+Initialize `components` to an array that is the result of CBOR-decoding the
+bytes that follow the three-byte BBS disclosure proof header. Ensure the result
+is an array of four elements —
+a byte array, a map of integers to integers, an
+array of integers, and another array of integers; otherwise, throw an error.
+
+ -
+Replace the second element in `components` using the result of calling the
+algorithm in Section , passing the existing
+second element of `components` as `compressedLabelMap`.
+
+ -
+Return derived proof value as an object with properties set to the four
+elements, using the names "`bbsProof`", "`labelMap`", "`mandatoryIndexes`", and
+"`selectiveIndexes`" respectively.
+
+
+
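+The following is a non-normative TypeScript sketch of derived proof parsing; the
+npm `cbor` package and the decompressLabelMap sketch above are assumed.
+
+import * as cbor from 'cbor';
+
+declare function decompressLabelMap(compressedLabelMap: Map<number, number>): Map<string, string>;
+
+function parseDerivedProofValue(proofValue: string) {
+  if (!proofValue.startsWith('u')) {
+    throw new Error('Expected a multibase-base64url-no-pad proof value.');
+  }
+  const decoded = Buffer.from(proofValue.slice(1), 'base64url');
+  if (decoded[0] !== 0xd9 || decoded[1] !== 0x5d || decoded[2] !== 0x03) {
+    throw new Error('Expected the bbs-2023 disclosure proof header 0xd9 0x5d 0x03.');
+  }
+  const [bbsProof, compressedLabelMap, mandatoryIndexes, selectiveIndexes] =
+    cbor.decodeFirstSync(decoded.subarray(3));
+  return { bbsProof, labelMap: decompressLabelMap(compressedLabelMap),
+    mandatoryIndexes, selectiveIndexes };
+}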
+
+
+
+ createVerifyData
+
+
+The following algorithm creates the data needed to perform verification of a
+BBS-protected verifiable credential. The inputs include a JSON-LD
+document (document), a BBS disclosure proof (proof),
+and any custom JSON-LD API options (such as a document loader). A single
+verify data object value is produced as output containing the following
+fields: "`bbsProof`", "`proofHash`", "`mandatoryHash`", "`selectiveIndexes`", and
+"`nonMandatory`".
+
+
+
+ -
+Initialize `proofHash` to the result of performing RDF Dataset Canonicalization
+[[RDF-CANON]] on the proof options, i.e., the proof portion of the document
+with the `proofValue` removed. The hash used is the same as that used in
+the signature algorithm, i.e., SHAKE-256. Note: This step can be
+performed in parallel; it only needs to be completed before this algorithm needs
+to use the `proofHash` value.
+
+ -
+Initialize `bbsProof`, `labelMap`, `mandatoryIndexes`, and `selectiveIndexes` to
+the values associated with their property names in the object returned when
+calling the algorithm in Section
+, passing `proofValue` from `proof`.
+
+ -
+Initialize `labelMapFactoryFunction` to the result of calling the
+"`createLabelMapFunction`" algorithm, passing `labelMap`.
+
+ -
+Initialize `nquads` to the result of calling the "`labelReplacementCanonicalize`"
+algorithm of [[DI-ECDSA]], passing `document`, `labelMapFactoryFunction`, and
+any custom
+JSON-LD API options. Note: This step transforms the document into an array of
+canonical N-Quads with pseudorandom blank node identifiers based on `labelMap`.
+
+ -
+Initialize `mandatory` to an empty array.
+
+ -
+Initialize `nonMandatory` to an empty array.
+
+ -
+For each entry (`index`, `nq`) in `nquads`, separate the N-Quads into mandatory
+and non-mandatory categories:
+
+ -
+If `mandatoryIndexes` includes `index`, add `nq` to `mandatory`.
+
+ -
+Otherwise, add `nq` to `nonMandatory`.
+
+
+
+ -
+Initialize `mandatoryHash` to the result of calling the "`hashMandatory`"
+primitive, passing `mandatory`.
+
+ -
+Return an object with properties matching `bbsProof`, `proofHash`,
+`nonMandatory`, `mandatoryHash`, and `selectiveIndexes`.
+
+
+
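+The following is a non-normative TypeScript sketch of the partitioning performed
+in step 7 above, using the parsed mandatory indexes.
+
+function partitionNQuads(nquads: string[], mandatoryIndexes: number[]) {
+  const mandatory: string[] = [];
+  const nonMandatory: string[] = [];
+  nquads.forEach((nq, index) => {
+    (mandatoryIndexes.includes(index) ? mandatory : nonMandatory).push(nq);
+  });
+  return { mandatory, nonMandatory };
+}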
+
+
+
+
+
+ bbs-2023
+
+
+The `bbs-2023` cryptographic suite takes an input document, canonicalizes
the document using the Universal RDF Dataset Canonicalization Algorithm
-[[RDF-CANON]], and then cryptographically hashes and signs the output
-resulting in the production of a data integrity proof. The algorithms in this
-section also include the verification of such a data integrity proof.
-
+[[RDF-CANON]], and then applies a number of transformations and cryptographic
+operations resulting in the production of a data integrity proof. The algorithms
+in this section also include the verification of such a data integrity proof.
+
-
- Add Proof
+
+ Add Base Proof (bbs-2023)
-
-To generate a proof, the algorithm in
+
+To generate a base proof, the algorithm in
-Section 4.1: Add Proof in the Data Integrity
+Section 4.1: Add Proof of the Data Integrity
[[VC-DATA-INTEGRITY]] specification MUST be executed.
For that algorithm, the cryptographic suite specific
transformation algorithm is defined in Section
-, the
+, the
-hashing algorithm is defined in Section ,
+hashing algorithm is defined in Section ,
and the
proof serialization algorithm is defined in Section
-.
-
-
-
-
-
-
-
+
+ Base Proof Transformation (bbs-2023)
+
The following algorithm specifies how to transform an unsecured input document
into a transformed document that is ready to be provided as input to the
-hashing algorithm in Section .
-
-
-
+hashing algorithm in Section .
+
+
Required inputs to this algorithm are an
-unsecured data document (unsecuredDocument
) and
-
-transformation options (options
). The
+
+unsecured data document (unsecuredDocument) and
+transformation options (options). The
transformation options MUST contain a type identifier for the
-
-cryptographic suite (type
) and a cryptosuite
-identifier (cryptosuite
). A transformed data document is
-produced as output. Whenever this algorithm encodes strings, it MUST use UTF-8
-encoding.
-
-
-
- -
-If
options
.type
is not set to the string
-`DataIntegrityProof` and options
.cryptosuite
is not
-set to the string `bbs-signature-2023` then a `PROOF_TRANSFORMATION_ERROR` MUST be
-raised.
-
- -
-Let
canonicalDocument
be the result of applying the
-Universal RDF Dataset Canonicalization Algorithm
-[[RDF-CANON]] to the unsecuredDocument
.
-
- -
-Set
output
to the value of canonicalDocument
.
-
- -
-Return
canonicalDocument
as the transformed data document.
-
-
-
+
+cryptographic suite (type), a cryptosuite
+identifier (cryptosuite), and a verification method
+(verificationMethod). The transformation options MUST contain an
+array of mandatory JSON pointers (mandatoryPointers) and MAY contain
+additional options, such as a JSON-LD document loader. A transformed data
+document is produced as output. Whenever this algorithm encodes strings, it
+MUST use UTF-8 encoding.
+
+
+ -
+Initialize `hmac` to an HMAC API using a locally generated and exportable HMAC
+key. The HMAC uses the same hash algorithm used in the signature algorithm,
+i.e., SHAKE-256.
+
+ -
+Initialize `labelMapFactoryFunction` to the result of calling the
+`createShuffledIdLabelMapFunction` algorithm passing `hmac` as `HMAC`.
+
+ -
+Initialize `groupDefinitions` to a map with an entry with a key of the string
+"`mandatory`" and a value of mandatoryPointers.
+
+ -
+Initialize `groups` to the result of calling the algorithm in
+Section 3.3.16
+canonicalizeAndGroup of the [[DI-ECDSA]] specification, passing
+`labelMapFactoryFunction`,
+`groupDefinitions`, `unsecuredDocument` as `document`, and any custom JSON-LD
+API options. Note: This step transforms the document into an array of canonical
+N-Quads whose order has been shuffled based on HMAC-applied blank node
+identifiers, and groups
+the N-Quad strings according to selections based on JSON pointers.
+
+ -
+Initialize `mandatory` to the values in the `groups.mandatory.matching` map.
+
+ -
+Initialize `nonMandatory` to the values in the `groups.mandatory.nonMatching`
+map.
+
+ -
+Initialize `hmacKey` to the result of exporting the HMAC key from `hmac`.
+
+ -
+Return an object with "`mandatoryPointers`" set to `mandatoryPointers`,
+"`mandatory`" set to `mandatory`, "`nonMandatory`" set to `nonMandatory`,
+and "`hmacKey`" set to `hmacKey`.
+
+
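+The following is a non-normative TypeScript sketch of this transformation. The
+canonicalizeAndGroup primitive of [[DI-ECDSA]] and the
+createShuffledIdLabelMapFunction sketch above are declared as stand-ins, and
+their exact signatures are assumptions.
+
+import { randomBytes } from 'node:crypto';
+
+declare function canonicalizeAndGroup(options: any): Promise<any>;
+declare function createShuffledIdLabelMapFunction(hmacKey: Uint8Array): unknown;
+
+async function transformForBaseProof(unsecuredDocument: object,
+    options: { mandatoryPointers: string[] }) {
+  const hmacKey = randomBytes(32); // locally generated, exportable HMAC key
+  const labelMapFactoryFunction = createShuffledIdLabelMapFunction(hmacKey);
+  const groupDefinitions = { mandatory: options.mandatoryPointers };
+  const { groups } = await canonicalizeAndGroup({
+    document: unsecuredDocument, labelMapFactoryFunction, groups: groupDefinitions });
+  return {
+    mandatoryPointers: options.mandatoryPointers,
+    mandatory: [...groups.mandatory.matching.values()],
+    nonMandatory: [...groups.mandatory.nonMatching.values()],
+    hmacKey
+  };
+}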
+
-
- Hashing
+
+ Base Proof Hashing (bbs-2023)
-
+
The following algorithm specifies how to cryptographically hash a
transformed data document and proof configuration
into cryptographic hash data that is ready to be provided as input to the
-algorithms in Section or
-Section .
-
+algorithms in Section .
+
-
+
The required inputs to this algorithm are a transformed data document
-(transformedDocument
) and proof configuration
-(proofConfig
). A single hash data value represented as
-series of bytes is produced as output.
-
+(transformedDocument) and canonical proof configuration
+(canonicalProofConfig). A hash data value represented
+as an object is produced as output.
+
-
- -
-
- Specify how each item in the canonicalized input is hashed and included a set
-that is then signed over in .
-
-
- -
-
- Specify how proofConfigHash
is generated.
-
-
-
- -
-
- Specify how hashData
is composed in a way that can be signed over in
- .
-
-
-
- -
-Return
hashData
as the hash data.
-
-
+
+ -
+Initialize `proofHash` to the result of calling the RDF Dataset Canonicalization
+algorithm [[RDF-CANON]] on `canonicalProofConfig` and then cryptographically
+hashing the result using the same hash that is used by the signature algorithm,
+i.e., SHAKE-256. Note: This step can be performed in parallel;
+it only needs to be completed before this algorithm terminates, as the result is
+part of the return value.
+
+ -
+Initialize `mandatoryHash` to the result of calling the algorithm in
+Section 3.3.17
+hashMandatoryNQuads of the [[DI-ECDSA]] specification, passing
+transformedDocument.`mandatory` and utilizing the SHAKE-256
+algorithm.
+
+ -
+Initialize `hashData` as a deep copy of transformedDocument, and
+add `proofHash` as "`proofHash`" and `mandatoryHash` as "`mandatoryHash`" to that
+object.
+
+ -
+Return `hashData` as hash data.
+
+
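+The following is a non-normative TypeScript sketch of base proof hashing.
+SHAKE-256 from node:crypto is used and a 32-byte output length is assumed; the
+inputs are assumed to be arrays of canonical N-Quad strings.
+
+import { createHash } from 'node:crypto';
+
+function shake256(input: string): Buffer {
+  return createHash('shake256', { outputLength: 32 }).update(input, 'utf8').digest();
+}
+
+function hashBaseProofData(
+    transformedDocument: { mandatory: string[]; [key: string]: unknown },
+    canonicalProofConfig: string[]) {
+  const proofHash = shake256(canonicalProofConfig.join(''));
+  const mandatoryHash = shake256(transformedDocument.mandatory.join(''));
+  // hashData: a copy of the transformed document plus the two hashes.
+  return { ...transformedDocument, proofHash, mandatoryHash };
+}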
-
+
-
- Proof Configuration
+
+ Base Proof Configuration (bbs-2023)
-
+
The following algorithm specifies how to generate a
proof configuration from a set of proof options
-that is used as input to the proof hashing algorithm.
-
+that is used as input to the
+base proof hashing algorithm.
+
-
+
The required inputs to this algorithm are proof options
-(options
). The proof options MUST contain a type identifier
+(options). The proof options MUST contain a type identifier
for the
-
-cryptographic suite (type
) and MUST contain a cryptosuite
-identifier (cryptosuite
). A proof configuration
+
+cryptographic suite (type) and MUST contain a cryptosuite
+identifier (cryptosuite). A proof configuration
object is produced as output.
-
+
-
- -
-Let
proofConfig
be an empty object.
-
- -
-Set
proofConfig
.type
to
-options
.type
.
-
- -
-If
options
.cryptosuite
is set, set
-proofConfig
.cryptosuite
to its value.
-
- -
-If
options
.type
is not set to `DataIntegrityProof` and
-proofConfig
.cryptosuite
is not set to `bbs-signature-2023`, an
+
+ -
+Let proofConfig be an empty object.
+
+ -
+Set proofConfig.type to
+options.type.
+
+ -
+If options.cryptosuite is set, set
+proofConfig.cryptosuite to its value.
+
+ -
+If options.type is not set to `DataIntegrityProof` and
+proofConfig.cryptosuite is not set to `bbs-2023`, an
`INVALID_PROOF_CONFIGURATION` error MUST be raised.
-
- -
-Set
proofConfig
.created
to
-options
.created
. If the value is not a valid
+
+ -
+Set proofConfig.created to
+options.created. If the value is not a valid
[[XMLSCHEMA11-2]] datetime, an `INVALID_PROOF_DATETIME` error MUST be raised.
-
- -
-Set
proofConfig
.verificationMethod
to
-options
.verificationMethod
.
-
- -
-Set
proofConfig
.proofPurpose
to
-options
.proofPurpose
.
-
- -
-Return
proofConfig
.
-
-
+
+ -
+Set proofConfig.verificationMethod to
+options.verificationMethod.
+
+ -
+Set proofConfig.proofPurpose to
+options.proofPurpose.
+
+ -
+Set proofConfig.@context to
+unsecuredDocument.@context.
+
+ -
+Let canonicalProofConfig be the result of applying the
+Universal RDF Dataset Canonicalization Algorithm
+[[RDF-CANON]] to the proofConfig.
+
+ -
+Return canonicalProofConfig.
+
+
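+A non-normative sketch of this algorithm is shown below. The `canonize` helper
+stands in for a JSON-LD to canonical N-Quads step per [[RDF-CANON]]; its API is
+an assumption of the sketch, and the datetime check is deliberately loose.
+
+```ts
+// Sketch only. `canonize` is an assumed JSON-LD canonicalization helper.
+declare function canonize(doc: object): Promise<string>;
+
+interface ProofOptions {
+  type: string; cryptosuite?: string; created?: string;
+  verificationMethod?: string; proofPurpose?: string;
+}
+
+async function baseProofConfig(options: ProofOptions,
+    unsecuredDocument: { '@context'?: unknown }): Promise<string> {
+  const proofConfig: Record<string, unknown> = { type: options.type };
+  if (options.cryptosuite !== undefined) {
+    proofConfig.cryptosuite = options.cryptosuite;
+  }
+  // Mirrors the error condition in the prose above.
+  if (options.type !== 'DataIntegrityProof' &&
+      proofConfig.cryptosuite !== 'bbs-2023') {
+    throw new Error('INVALID_PROOF_CONFIGURATION');
+  }
+  proofConfig.created = options.created;
+  // Loose stand-in for a full [[XMLSCHEMA11-2]] datetime check.
+  if (options.created !== undefined && Number.isNaN(Date.parse(options.created))) {
+    throw new Error('INVALID_PROOF_DATETIME');
+  }
+  proofConfig.verificationMethod = options.verificationMethod;
+  proofConfig.proofPurpose = options.proofPurpose;
+  proofConfig['@context'] = unsecuredDocument['@context'];
+  // Canonicalize the proof configuration for the hashing step.
+  return canonize(proofConfig);
+}
+```
+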
-
+
-
- Proof Serialization
-
-The following algorithm specifies how to serialize a digital signature from
-a set of cryptographic hash data. This
+
+ Base Proof Serialization (bbs-2023)
+
+
+The following algorithm, to be called by an issuer of a BBS-protected Verifiable
+Credential, specifies how to create a base proof. The base proof is to be
+given only to the holder, who is responsible for generating a derived proof from
+it, exposing only selectively disclosed details in the proof to a verifier. This
algorithm is designed to be used in conjunction with the algorithms defined
in the Data Integrity [[VC-DATA-INTEGRITY]] specification,
-
+
Section 4: Algorithms. Required inputs are
-cryptographic hash data (hashData
) and
-proof options (options
). The
+cryptographic hash data (hashData) and
+proof options (options). The
proof options MUST contain a type identifier for the
-
-cryptographic suite (type
) and MAY contain a cryptosuite
-identifier (cryptosuite
). A single digital proof value
+
+cryptographic suite (type) and MAY contain a cryptosuite
+identifier (cryptosuite). A single digital proof value
represented as a series of bytes is produced as output.
-
+
-
- -
-Let
privateKeyBytes
be the result of retrieving the
-private key bytes associated with the
-options
.verificationMethod
value as described in the
-Data Integrity [[VC-DATA-INTEGRITY]] specification.
-
- -
-
- Specify how proofBytes
is generated and consumed by Section
-.
-
-
- -
-Return
proofBytes
as the digital proof.
-
-
+
+ -
+Initialize `proofHash`, `mandatoryPointers`, `mandatoryHash`, `nonMandatory`,
+and `hmacKey` to the values associated with their property names in
+hashData.
+
+ -
+Initialize `bbsHeader` to the concatenation of `proofHash` and `mandatoryHash` in
+that order.
+
+ -
+Initialize `bbsMessages` to an array of byte arrays obtained from the
+UTF-8 encoding of the values in the `nonMandatory` array.
+
+ -
+Compute the `bbsSignature` using the `Sign` procedure of [[CFRG-BBS-SIGNATURE]],
+with the appropriate key material, `bbsHeader` for the `header`, and
+`bbsMessages` for the `messages`.
+
+ -
+Initialize `proofValue` to the result of calling the algorithm in Section
+, passing `bbsSignature`,
+`hmacKey`, and `mandatoryPointers` as parameters
+to the algorithm.
+
+ -
+Return `proofValue` as the digital proof.
+
+
+
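+A non-normative sketch of base proof serialization is shown below. `bbsSign`
+stands in for the `Sign` procedure of [[CFRG-BBS-SIGNATURE]] and
+`serializeBaseProofValue` for the proof-value serialization algorithm referenced
+above; both are assumed helpers with illustrative signatures.
+
+```ts
+// Sketch only: assumed helper interfaces for BBS signing and proof-value
+// serialization.
+declare function bbsSign(args: {
+  secretKey: Uint8Array; publicKey: Uint8Array;
+  header: Uint8Array; messages: Uint8Array[];
+}): Uint8Array;
+declare function serializeBaseProofValue(args: {
+  bbsSignature: Uint8Array; hmacKey: Uint8Array; mandatoryPointers: string[];
+}): string;
+
+interface HashData {
+  proofHash: Uint8Array; mandatoryPointers: string[]; mandatoryHash: Uint8Array;
+  nonMandatory: Map<number, string>; hmacKey: Uint8Array;
+}
+
+function serializeBaseProof(hashData: HashData,
+    keyPair: { secretKey: Uint8Array; publicKey: Uint8Array }): string {
+  const { proofHash, mandatoryPointers, mandatoryHash, nonMandatory, hmacKey } =
+    hashData;
+  // bbsHeader = proofHash || mandatoryHash
+  const bbsHeader = new Uint8Array([...proofHash, ...mandatoryHash]);
+  // Each non-mandatory N-Quad becomes one UTF-8-encoded BBS message.
+  const encoder = new TextEncoder();
+  const bbsMessages = [...nonMandatory.values()].map(nq => encoder.encode(nq));
+  const bbsSignature = bbsSign({ ...keyPair, header: bbsHeader, messages: bbsMessages });
+  // The proof value bundles the signature, HMAC key, and mandatory pointers.
+  return serializeBaseProofValue({ bbsSignature, hmacKey, mandatoryPointers });
+}
+```
+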
-
-
- Proof Verification
+
+ Add Derived Proof (bbs-2023)
-
-The following algorithm specifies how to verify a digital signature from
-a set of cryptographic hash data. This
-algorithm is designed to be used in conjunction with the algorithms defined
-in the Data Integrity [[VC-DATA-INTEGRITY]] specification,
-
-Section 4: Algorithms. Required inputs are
-cryptographic hash data (hashData
),
-a digital signature (proofBytes
) and
-proof options (options
). A verification result
-represented as a boolean value is produced as output.
-
+
+The following algorithm, to be called by a holder of a `bbs-2023`-protected
+verifiable credential, creates a selective disclosure derived proof.
+The derived proof is to be given to the verifier. The inputs include a
+JSON-LD document (document), a BBS base proof
+(proof), an array of JSON pointers to use to selectively disclose
+statements (selectivePointers), and any custom JSON-LD API options,
+such as a document loader. A single selectively revealed document
+value, represented as an object, is produced as output.
+
-
- -
-Let
publicKeyBytes
be the result of retrieving the
-public key bytes associated with the
-options
.verificationMethod
value as described in the
-Data Integrity [[VC-DATA-INTEGRITY]] specification,
-
-Section 4: Retrieving Cryptographic Material.
-
- -
-Let
verificationResult
be the result of applying the verification
-algorithm defined in the BBS Signature specification [[CFRG-BBS-SIGNATURE]],
-with hashData
as the data to be verified against the
-proofBytes
using the public key specified by
-publicKeyBytes
.
-
- -
-Return
verificationResult
as the verification result.
-
-
+
+ -
+Initialize `bbsProof`, `labelMap`, `mandatoryIndexes`, `selectiveIndexes`, and
+`revealDocument` to the values associated with their
+property names in the object returned when calling the algorithm in
+Section , passing the `document`, `proof`,
+`selectivePointers`, and any custom JSON-LD API options, such as a document
+loader.
+
+ -
+Initialize `newProof` to a shallow copy of `proof`.
+
+ -
+Replace `proofValue` in `newProof` with the result of calling the algorithm
+in Section , passing `bbsProof`,
+`labelMap`, `mandatoryIndexes`, and `selectiveIndexes`.
+
+ -
+Set the value of the "`proof`" property in `revealDocument` to `newProof`.
+
+ -
+Return `revealDocument` as the selectively revealed document.
+
+
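+A non-normative sketch of this algorithm is shown below; `createDisclosureData`
+and `serializeDerivedProofValue` are assumed helpers corresponding to the
+algorithms referenced in the steps above, with illustrative signatures.
+
+```ts
+// Sketch only: assumed helpers for the disclosure-data and proof-value
+// serialization algorithms referenced above.
+declare function createDisclosureData(args: {
+  document: object; proof: object; selectivePointers: string[]; options?: object;
+}): Promise<{
+  bbsProof: Uint8Array; labelMap: Map<string, string>;
+  mandatoryIndexes: number[]; selectiveIndexes: number[];
+  revealDocument: Record<string, unknown>;
+}>;
+declare function serializeDerivedProofValue(args: {
+  bbsProof: Uint8Array; labelMap: Map<string, string>;
+  mandatoryIndexes: number[]; selectiveIndexes: number[];
+}): string;
+
+async function addDerivedProof(document: object, proof: Record<string, unknown>,
+    selectivePointers: string[], options: object = {}) {
+  // Step 1: compute all disclosure data for the selective reveal.
+  const { bbsProof, labelMap, mandatoryIndexes, selectiveIndexes, revealDocument } =
+    await createDisclosureData({ document, proof, selectivePointers, options });
+  // Steps 2-3: shallow-copy the base proof and replace its proofValue.
+  const newProof = { ...proof };
+  newProof.proofValue = serializeDerivedProofValue(
+    { bbsProof, labelMap, mandatoryIndexes, selectiveIndexes });
+  // Steps 4-5: attach the new proof and return the revealed document.
+  revealDocument.proof = newProof;
+  return revealDocument;
+}
+```
+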
-
-
-
- bbs-proof-2023
-
- The `bbs-proof-2023` cryptographic suite takes an input document,
- that has previously been secured using `bbs-signature-2023`,
- derives from this original document a set of messages to be disclosed
- representing a redacted form of the original document, and
- applies the Proof Gen
- algorithm to produce a proof of knowledge for the disclosed messages.
- The result is a new `proof`, containing the following attributes:
-
-
-
-
- -
- `generators`
-
- -
- `disclosed`
-
- -
- `proofValue`
-
-
- This operation can be applied by any
- holder of a `bbs-signature-2023`
- secured document, and as such, `bbs-proof-2023` MUST be implemented with
- awareness of the mandatory to disclose fields the original
- issuer required.
-
-
-
-
-
-
-
- Proof Configuration
-
-
-
- Proof Serialization
-
-
-
+
+
+
+
+ Verify Derived Proof (bbs-2023)
+
+
+The following algorithm attempts verification of a `bbs-2023` derived
+proof. This algorithm is called by a verifier of a BBS-protected
+verifiable credential. The inputs include a JSON-LD document
+(document), a BBS disclosure proof (proof), and any
+custom JSON-LD API options (such as a document loader). A single boolean
+verification result value is produced as output.
+
-
-
+
+
+ -
+Initialize `bbsProof`, `proofHash`, `mandatoryHash`, `selectiveIndexes`, and
+`nonMandatory` to the values associated with their property
+names in the object returned when calling the algorithm in Section
+, passing the `document`, `proof`, and any
+custom JSON-LD API options (such as a document loader).
+
+ -
+Initialize `bbsHeader` to the concatenation of `proofHash` and `mandatoryHash`
+in that order. Initialize `disclosedMessages` to an array of byte arrays
+obtained from the UTF-8 encoding of the elements of the `nonMandatory` array.
+
+
+ -
+Initialize `verificationResult` to the result of applying the verification
+algorithm `ProofVerify` of [[CFRG-BBS-SIGNATURE]]
+with `PK` set as the public key of the original issuer, `proof` set as `bbsProof`,
+`header` set as `bbsHeader`, `disclosed_messages` set as `disclosedMessages`,
+`ph` set as an empty byte array, and `disclosed_indexes` set as
+`selectiveIndexes`. Return `verificationResult` as the verification result.
+
+
+
+
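+A non-normative sketch of this verification algorithm is shown below;
+`createVerifyData` stands in for the verification-data algorithm referenced
+above, and `bbsProofVerify` for the `ProofVerify` operation of
+[[CFRG-BBS-SIGNATURE]]. Both helpers and their signatures are assumptions.
+
+```ts
+// Sketch only: assumed helpers for the verification-data algorithm and the
+// CFRG BBS `ProofVerify` operation.
+declare function createVerifyData(args: {
+  document: object; proof: object; options?: object;
+}): Promise<{
+  bbsProof: Uint8Array; proofHash: Uint8Array; mandatoryHash: Uint8Array;
+  selectiveIndexes: number[]; nonMandatory: string[];
+}>;
+declare function bbsProofVerify(args: {
+  publicKey: Uint8Array; proof: Uint8Array; header: Uint8Array;
+  disclosedMessages: Uint8Array[]; presentationHeader: Uint8Array;
+  disclosedIndexes: number[];
+}): boolean;
+
+async function verifyDerivedProof(document: object, proof: object,
+    issuerPublicKey: Uint8Array, options: object = {}): Promise<boolean> {
+  const { bbsProof, proofHash, mandatoryHash, selectiveIndexes, nonMandatory } =
+    await createVerifyData({ document, proof, options });
+  // bbsHeader = proofHash || mandatoryHash
+  const bbsHeader = new Uint8Array([...proofHash, ...mandatoryHash]);
+  const encoder = new TextEncoder();
+  const disclosedMessages = nonMandatory.map(nq => encoder.encode(nq));
+  return bbsProofVerify({
+    publicKey: issuerPublicKey,
+    proof: bbsProof,
+    header: bbsHeader,
+    disclosedMessages,
+    presentationHeader: new Uint8Array(0), // `ph` is an empty byte array
+    disclosedIndexes: selectiveIndexes
+  });
+}
+```
+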
-
- Privacy Considerations
- TODO: We need to add a complete list of privacy
- considerations.
-
+
-
- Security Considerations
- TODO: We need to add a complete list of security
- considerations.
-
+
+ Privacy Considerations
+ TODO: We need to add a complete list of privacy
+ considerations.
+
+
+ Security Considerations
+ TODO: We need to add a complete list of security
+ considerations.
+
+
+ Test Vectors
+
+Demonstrating selective disclosure features, including mandatory disclosure,
+selective disclosure, and the overlap between them,
+requires an input credential document with more content than previous test
+vectors. To avoid excessively long test vectors, the starting document test
+vector is based on a purely fictitious windsurfing (sailing) competition
+scenario. In addition, we break the test vectors into two groups: those
+that would be generated by the issuer (base proof) and those that would be
+generated by the holder (derived proof).
+
+
+ Base Proof
+
+To add a selective disclosure base proof to a document, the issuer needs
+the following cryptographic key material:
+
+
+ -
+The issuer's private/public key pair, i.e., the key pair corresponding to the
+verification method that will be part of the proof.
+
+ -
+An HMAC key. This is used to randomize the order of the blank node IDs to avoid
+potential information leakage via the blank node ID ordering. This is used only
+once, and is shared between issuer and holder. The HMAC in this case is
+functioning as a pseudorandom function (PRF).
+
+
+
+The key material used to generate the add base proof test vectors is shown
+below. Hexadecimal representation is used for the BBS key pairs and the HMAC
+key.
+
+
+
+
+In our scenario, a sailor is registering with a race organizer for a series of
+windsurfing races to be held over a number of days on Maui. The organizer will
+inspect the sailor's equipment to certify that what has been declared is
+accurate. The sailor's unsigned equipment inventory is shown below.
+
+
+
+In addition to letting other sailors know what kinds of equipment their competitors
+may be sailing on, it is mandatory that each sailor disclose the year of their
+most recent windsurfing board and full details on two of their sails. Note that
+all sailors are identified by a sail number that is printed on all their
+equipment. This mandatory information is specified via an array of JSON pointers
+as shown below.
+
+
+
+The result of applying the above JSON pointers to the sailor's equipment document
+is shown below.
+
+
+
+Transformation of the unsigned document begins with canonicalizing the document,
+as shown below.
+
+
+
+To prevent possible information leakage from the ordering of the blank node IDs,
+these are processed through a PRF (i.e., the HMAC) to give the canonicalized HMAC
+document shown below. This represents an ordered list of statements that will be
+subject to mandatory and selective disclosure, i.e., it is from this list that
+statements are grouped.
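+Purely as an illustration of this relabeling idea (the normative derivation is
+defined by `createShuffledIdLabelMapFunction` in [[DI-ECDSA]]), the sketch below
+HMACs each canonical label, sorts the digests, and assigns fresh `bN` labels in
+that sorted order; the digest algorithm and encoding are placeholders.
+
+```ts
+// Illustration only: HMAC each canonical blank node label, sort by digest,
+// and assign fresh 'bN' labels in the sorted order. The 'sha256' digest and
+// hex encoding here are placeholders, not the normative derivation.
+import { createHmac } from 'node:crypto';
+
+function shuffleLabels(canonicalLabels: string[], hmacKey: Buffer): Map<string, string> {
+  const digests = canonicalLabels.map(label => ({
+    label,
+    digest: createHmac('sha256', hmacKey).update(label).digest('hex')
+  }));
+  digests.sort((a, b) => a.digest.localeCompare(b.digest));
+  // Map each canonical label (e.g., 'c14n0') to an order-hiding 'bN' label.
+  return new Map(digests.map((entry, i): [string, string] => [entry.label, `b${i}`]));
+}
+```
+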
+
+
+
+The above canonical document gets grouped into mandatory and non-mandatory
+statements. The final output of the selective disclosure transformation process
+is shown below. Each statement is now grouped as mandatory or non-mandatory, and
+its index in the previous list of statements is remembered.
+
+
+
+The next step is to create the base proof configuration and canonicalize it.
+This is shown in the following two examples.
+
+
+
+
+In the hashing step, we compute the SHAKE-256 hash of the canonicalized proof
+options to produce the `proofHash`, and we compute the SHAKE-256 hash of the
+join of all the mandatory N-Quads to produce the `mandatoryHash`. These are
+shown below in hexadecimal format.
+
+
+
+Shown below are the computed `bbsSignature` in hexadecimal, and the
+`mandatoryPointers`. These are fed to the final serialization step along with
+the `hmacKey`.
+
+
+
+Finally, the values above are run through the algorithm of Section
+, to produce the `proofValue`, which is
+used in the signed base document shown below.
+
+
+
+
+ Derived Proof
+
+To create a derived proof, a holder starts with a signed document
+containing a base proof. The base document we will use for these test vectors is
+the final example from Section , above. The first
+step is to run the algorithm of Section to
+recover `bbsSignature`, `hmacKey`, and `mandatoryPointers`, as shown below.
+
+
+
+Next, the holder needs to indicate what else, if anything, they wish to reveal
+to the verifiers, by specifying JSON pointers for selective disclosure. In our
+windsurfing competition scenario, a sailor (the holder) has just completed their
+first day of racing, and wishes to reveal to the general public (the verifiers)
+all the details of the windsurfing boards they used in the competition. These
+are shown below. Note that this slightly overlaps with the mandatorily disclosed
+information, which included only the year of their most recent board.
+
+
+
+To produce the `revealDocument` (i.e., the unsigned document that will
+eventually be signed and sent to the verifier), we append the selective pointers
+to the mandatory pointers, and input these combined pointers along with the
+document without proof to the `selectJsonLd` algorithm of [[DI-ECDSA]],
+to get the result shown below.
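+A small sketch of this step is shown below, where `selectJsonLd` is an assumed
+helper with the semantics of the [[DI-ECDSA]] algorithm of the same name.
+
+```ts
+// Sketch only: `selectJsonLd` is an assumed helper with the semantics of the
+// [[DI-ECDSA]] algorithm of the same name.
+declare function selectJsonLd(args: { document: object; pointers: string[] }): object;
+
+function buildRevealDocument(documentWithoutProof: object,
+    mandatoryPointers: string[], selectivePointers: string[]): object {
+  // Combined pointers: mandatory pointers followed by the selective pointers.
+  const combinedPointers = [...mandatoryPointers, ...selectivePointers];
+  return selectJsonLd({ document: documentWithoutProof, pointers: combinedPointers });
+}
+```
+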
+
+
+
+Now that we know what the revealed document looks like, we need to furnish
+appropriately updated information to the verifier about which statements are
+mandatory, and the indexes for the selected non-mandatory statements. Running
+step 6 of the
+ yields an abundance of information about
+various statement groups relative to the original document. Below we show a
+portion of the indexes for those groups.
+
+
+
+The verifier needs to be able to aggregate and hash the mandatory statements. To
+enable this, we furnish them with a list of indexes of the mandatory statements
+adjusted to their positions in the reveal document (i.e., relative to the
+`combinedIndexes`), while the `selectiveIndexes` need to be adjusted relative to
+their positions within the `nonMandatoryIndexes`. These "adjusted" indexes are
+shown below.
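+The adjustment itself is a simple re-indexing; a hedged sketch is shown below,
+assuming the inputs are sorted arrays of absolute statement indexes (the
+variable names in the comments are illustrative).
+
+```ts
+// Illustration only: re-express absolute statement indexes relative to their
+// positions within another sorted index list.
+function adjustIndexes(absoluteIndexes: number[], relativeTo: number[]): number[] {
+  return absoluteIndexes
+    .map(i => relativeTo.indexOf(i))
+    .filter(position => position !== -1);
+}
+
+// For example (hypothetical variables):
+//   adjustedMandatoryIndexes = adjustIndexes(mandatoryIndexes, combinedIndexes);
+//   adjustedSelectiveIndexes = adjustIndexes(selectiveIndexes, nonMandatoryIndexes);
+```
+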
+
+
+
+
+The last important piece of disclosure data is a mapping of canonical blank node
+IDs to HMAC-based shuffled IDs, the `labelMap`, computed according to Section
+. This is shown below along with
+the rest of the disclosure data minus the reveal document.
+
+
+
+Finally, using the disclosure data above with the algorithm of Section
+, we obtain the signed derived (reveal)
+document shown below.
+
+
+
+
+