diff --git a/data/bandersnatch_sw_sha512_tai_ietf_vectors.json b/data/bandersnatch_sw_sha512_tai_ietf_vectors.json
index b75ec3d..312b32f 100644
--- a/data/bandersnatch_sw_sha512_tai_ietf_vectors.json
+++ b/data/bandersnatch_sw_sha512_tai_ietf_vectors.json
@@ -1,6 +1,6 @@
 [
   {
-    "comment": "Bandersnatch_SW_SHA-512_ELL2 - vector-1",
+    "comment": "Bandersnatch_SW_SHA-512_TAI - vector-1",
     "flags": "00",
     "sk": "2bd8776e6ca6a43d51987f756be88b643ab4431b523132f675c8f0004f5d5a17",
     "pk": "f55a48e6befa22dc42007ec6d5bf83620d8f794901f42b18c10a4f7a6176985280",
@@ -13,7 +13,7 @@
     "proof_s": "c05e6568ece7e8409f86962e48594546fcaf7d338ca9938c77db863b1d294f11"
   },
   {
-    "comment": "Bandersnatch_SW_SHA-512_ELL2 - vector-2",
+    "comment": "Bandersnatch_SW_SHA-512_TAI - vector-2",
     "flags": "00",
     "sk": "3d6406500d4009fdf2604546093665911e753f2213570a29521fd88bc30ede18",
     "pk": "fd72a90d1eeba6733824e76bb31991b8108d6562756b85f244333e3c7205225200",
@@ -26,7 +26,7 @@
     "proof_s": "56c0c71dc0250b3b8e51b66e8c2794e76974840f0d3176db4bb2207cf080db1c"
   },
   {
-    "comment": "Bandersnatch_SW_SHA-512_ELL2 - vector-3",
+    "comment": "Bandersnatch_SW_SHA-512_TAI - vector-3",
     "flags": "00",
     "sk": "8b9063872331dda4c3c282f7d813fb3c13e7339b7dc9635fdc764e32cc57cb15",
     "pk": "e30eae606d21dff460cdaecfc9bfcd2e319628ccc0242f3ca21f2d5c940ba41680",
@@ -39,7 +39,7 @@
     "proof_s": "70a744c46a4e72826de234deebcb6e826e23e4375f5233fd78ed0a4353b6c508"
   },
   {
-    "comment": "Bandersnatch_SW_SHA-512_ELL2 - vector-4",
+    "comment": "Bandersnatch_SW_SHA-512_TAI - vector-4",
     "flags": "00",
     "sk": "6db187202f69e627e432296ae1d0f166ae6ac3c1222585b6ceae80ea07670b14",
     "pk": "2a00e5a32e2f097858a1a4a73cf5c2fb4e6d375a4ea4cc3ae3e91660eade850c80",
@@ -52,7 +52,7 @@
     "proof_s": "85c6fc7354bd6a2e52aa5b5e25269d9cba3c21b34f8f80c596a56f62d8fa911c"
   },
   {
-    "comment": "Bandersnatch_SW_SHA-512_ELL2 - vector-5",
+    "comment": "Bandersnatch_SW_SHA-512_TAI - vector-5",
     "flags": "00",
     "sk": "b56cc204f1b6c2323709012cb16c72f3021035ce935fbe69b600a88d842c7407",
     "pk": "4f29d79a27b9545d7223431eb6a63776949454b16e2ac0b7a959304ce3e52b6a00",
@@ -65,7 +65,7 @@
     "proof_s": "a228daebff3f0c261bd6382d4e0be4f043ee687704a6596f72c626bc52b31418"
   },
   {
-    "comment": "Bandersnatch_SW_SHA-512_ELL2 - vector-6",
+    "comment": "Bandersnatch_SW_SHA-512_TAI - vector-6",
     "flags": "00",
     "sk": "da36359bf1bfd1694d3ed359e7340bd02a6a5e54827d94db1384df29f5bdd302",
     "pk": "e58e8ba2e99035fb7ae11fa14e2a609d6d13679278dac63ebee64ca8612ffa1480",
diff --git a/data/ed25519_sha512_tai_ietf_vectors.json b/data/ed25519_sha512_tai_ietf_vectors.json
index bffde48..b8670a5 100644
--- a/data/ed25519_sha512_tai_ietf_vectors.json
+++ b/data/ed25519_sha512_tai_ietf_vectors.json
@@ -9,7 +9,7 @@
     "h": "5e1dbdda4ce6a06e9e1062bf1c5d2ff6dd2689ba99a9b4c57e86bc8c9953e685",
     "gamma": "bfc0b9986b3732c37543cd5f66830a2c0d5005ee5d1a075c5ac6504c7a25fc1d",
     "beta": "1ce87ebb1710c3b55ff9c72e1d7e6f852bdbc4e793aa2ff1ceea1c698ca9b5b10709598d6c35b32e7caeecdb26e6d6f18199bc1545c3789fc30ef167ff8b51a5",
-    "proof_c": "ade440cd841f6432cf98f73e121d5a6e00000000000000000000000000000000",
+    "proof_c": "ade440cd841f6432cf98f73e121d5a6e",
     "proof_s": "452e5cc6afe527b077a8e1c3602873d890cbf729b67ae43b8cb651a1f9164a0d"
   },
   {
@@ -22,7 +22,7 @@
     "h": "a8b789c2545f3fbeb1f801136462a7734ff39a37d7352cc8d0b1d5bf2db54a08",
     "gamma": "9aeb013867130f00b431f30038d5054dfcd29b45bb2402d6f4f6b65225504eec",
     "beta": "ad37782159dce2a4674a2fa5e411df74c1bd4a59a119271252c459a6cb0a58d281705d0a0256a2a941a369f9f1d71dea0c1c7d3bf069cfd6edf92a267f8ed3f3",
-    "proof_c": "abd9d0175ef80e70c18d5f70f1a7d85900000000000000000000000000000000",
+    "proof_c": "abd9d0175ef80e70c18d5f70f1a7d859",
     "proof_s": "3f5b2196bd1f7ea04323d2325fbf876884cbdcc8b83551af5a4ffbb46478d30f"
   },
   {
@@ -35,7 +35,7 @@
     "h": "b2f60d2cdd4e97a5418b383abdea63c57aab609fa05f579c43374bf008e23964",
     "gamma": "d0bd8c705293879daed5264dd2c4b129a941db2e9513544a17100ec4634dfd46",
     "beta": "45e62e4c45a49aafc014042de5a870824326641d156bca99f894b2e8a0f44193f27ef85500a87a8ac023d66e4f70bb8b1bbb0b3d0b25eb5093ed99b3122fa1f4",
-    "proof_c": "9f4b10e5c86351368c14360e179fd44b00000000000000000000000000000000",
+    "proof_c": "9f4b10e5c86351368c14360e179fd44b",
     "proof_s": "925a654de7718fe44b7742cf1804447322ba4f1d861d67d5f73a9f040eb9540f"
   },
   {
@@ -48,7 +48,7 @@
     "h": "69567baa9dc496ef24aafb2da474ade1e1202f061e413ea33e0877dd8db085c7",
     "gamma": "be9ce3cd0b2ee84de50ce670ae9673ab3b5dc8aaffc7474415e09699f7378155",
     "beta": "c1e4361d7314984ce6b2d21fe4c27d12413fb5364e6e3be6894a3d5ad5c2f3783a03efc6908b203758eab6264cb03f1cdee98aa1ebe810f60e97f86cc27d221e",
-    "proof_c": "0d57ba5ec304a522f2f312f49613cd3800000000000000000000000000000000",
+    "proof_c": "0d57ba5ec304a522f2f312f49613cd38",
     "proof_s": "1f748f63f83197bf22e67aa850c1301187cbf50cb40fcd305910e340dd5e140d"
   },
   {
@@ -61,7 +61,7 @@
     "h": "71d02b885f6a81bb31d52c5bc54d8ba1c2fc52c42e34ddfd026e669caa30ca9e",
     "gamma": "032de270c2fba46c5345d09b039c389e826deb4083bba4a0e343518b1377c80f",
     "beta": "7ea607c45bc373ac948f334bb8d392b17f5156cf19330bab7a2572eaf7fcaa48fff48847d88a20659de03b2066ad8449d9c1fc3c178a6f3b074c33f7a74d0fde",
-    "proof_c": "666c44103979c11069521d4ba44e52fa00000000000000000000000000000000",
+    "proof_c": "666c44103979c11069521d4ba44e52fa",
     "proof_s": "f99e18b0bf2ec11c6c33e8bde9758af2d07e38ba2848b6d5eb3462d156630009"
   },
   {
@@ -74,7 +74,7 @@
     "h": "bc89b34d01bda6e9115462584ed1f5554e9035ea4b4b77d27dbd3027c16dfa43",
     "gamma": "50901ea735820db2198b6b6a8c8b62abdbc7bedde6b88a30e439583f1e387103",
     "beta": "4dc1285faf82ddd0b0b60f6055051d3f501e0b55b57d5f440467eea15dce84e56b20dca9462c11e57439b558b557f09a5ae8fbafea3f2d477f8f9aad10f19fc3",
-    "proof_c": "127dcc8f1a99979f5e4386f31d0af2f400000000000000000000000000000000",
+    "proof_c": "127dcc8f1a99979f5e4386f31d0af2f4",
     "proof_s": "da688770c94d9fda341f68294beeb746f7f796fa801b07666b1b1039bd250505"
   }
 ]
\ No newline at end of file
diff --git a/data/secp256_sha256_tai_ietf_vectors.json b/data/secp256_sha256_tai_ietf_vectors.json
index 23ac5bd..87c4a34 100644
--- a/data/secp256_sha256_tai_ietf_vectors.json
+++ b/data/secp256_sha256_tai_ietf_vectors.json
@@ -9,7 +9,7 @@
     "h": "02ce0d7db4edd23b99b3736443c53ad12b133266c45646be3902a854606672e6ef",
     "gamma": "0298b844148e41d128a2f2d49eb208618039cd9ece62d0fc2659461ebe2c7df2ac",
     "beta": "92fd1eff9ddb4173ee87c45476b1c43486da7c20efd7a3c5b88891cf99bcd680",
-    "proof_c": "000000000000000000000000000000007b331610072248bd15c7aef0832ae2f9",
+    "proof_c": "7b331610072248bd15c7aef0832ae2f9",
     "proof_s": "d30c63494b5e087f26bca7e3b57dd4876f6d01597e9ac272f33c7ac48f11a5b8"
   },
   {
@@ -22,7 +22,7 @@
     "h": "0242336b14ccf0f101b64a1cca4b08f72f6be851f717dc56c07262500a84071e06",
     "gamma": "03b2ae579ccf850038d9ba6b94a87646e8ecdec789639aab4c1492f11b114e6515",
     "beta": "f30b3d4fb257c287cf423d044796ce1eaf6ec9c9d42ae4305274dad9a2fbd4dd",
-    "proof_c": "00000000000000000000000000000000230bcb8e6ff67f15e2aa27a7e570e743",
+    "proof_c": "230bcb8e6ff67f15e2aa27a7e570e743",
     "proof_s": "8fdd6585e6ca4e09ada872c09c2ccacaa60df0cd919b10c4e2a68285ba236105"
   },
   {
@@ -35,7 +35,7 @@
     "h": "03b90c05c10ddbfdafda6a964012946641737f888b90c930abdd2454c568f73d70",
"gamma": "03c572767b190538c104182dfac985cd58430074f0db24305f4006d896c93b96c7", "beta": "d06d1139d2f0199b36d48b8d4980421e3d98504442e475c1760dcbbd19d27543", - "proof_c": "000000000000000000000000000000004b5b11f3b418095a5d02f835a283cc99", + "proof_c": "4b5b11f3b418095a5d02f835a283cc99", "proof_s": "78bd588629d7dbfa7909384fc55590443f7d72996655ca933bf5979ccf342e14" }, { @@ -48,7 +48,7 @@ "h": "02bca955248372984158e0f9bf7d8d9bb1ee87f188cf27e93647a967a0f9b09535", "gamma": "0323c860bdba4ceccc28ebed9283dc6f74b4e16c94ffe566f49318b955458c20e3", "beta": "cd9444669432a6b5ca8ad3a4adcd8152fb8a76e327877ea876e6bf3e761b6e13", - "proof_c": "00000000000000000000000000000000c209c08df735f0a15e6318fcb7158693", + "proof_c": "c209c08df735f0a15e6318fcb7158693", "proof_s": "9db4909d8cab16e9e79004fc74fc66eb3560debbad9aaa01f61e4077f893f1f7" }, { @@ -61,7 +61,7 @@ "h": "039edace829d35ef117e135c8b81b5b00a0d3c9f24a349761fc07a7503bf048966", "gamma": "02d7b9b206af6fae3a6225ef09701e965e5387a22eca8e1bc71c167cd53f66903a", "beta": "0f4965be7ec48d075a0035d5d1874a05328cb9f1cbe6b4066813ebb38e46b101", - "proof_c": "00000000000000000000000000000000ebc773f07f5a559bf176f57f83d5d80d", + "proof_c": "ebc773f07f5a559bf176f57f83d5d80d", "proof_s": "e0f386d45474ea6eb40ebf59e4c7e6695813b6bf4b7984db868e415d1d3432e9" }, { @@ -74,7 +74,7 @@ "h": "029d367ea8eaf47bdb453f836c0c6afe9e5540c33adf4b20e1ecdf96c45f07ed1f", "gamma": "0288734131a4dddb3a69e0ff346bd6afa5fcf29a328f1a4f7fa00c512127960e33", "beta": "0a53ce69bc69cf00fc7f42a74717d8271fb6f7124351697e64e006e1e77733ba", - "proof_c": "000000000000000000000000000000007a1041d32a0e043ab11822977b6108b9", + "proof_c": "7a1041d32a0e043ab11822977b6108b9", "proof_s": "b46c510a1bd8605aaf76d61407ea94a1158244b9b513a1653dfd347e2ea33847" } ] \ No newline at end of file diff --git a/data/vectors-generate.sh b/data/vectors-generate.sh new file mode 100755 index 0000000..b1e1570 --- /dev/null +++ b/data/vectors-generate.sh @@ -0,0 +1,7 @@ +#!/usr/bin/env bash + +cargo test \ + --release \ + --features full \ + -- \ + --ignored diff --git a/src/codec.rs b/src/codec.rs new file mode 100644 index 0000000..6455683 --- /dev/null +++ b/src/codec.rs @@ -0,0 +1,136 @@ +use ark_ec::short_weierstrass::SWCurveConfig; + +use super::*; + +/// Defines points and scalars encoding format. +pub trait Codec { + const BIG_ENDIAN: bool; + + /// Point encode. + fn point_encode(pt: &AffinePoint, buf: &mut Vec); + + /// Point decode. + fn point_decode(buf: &[u8]) -> AffinePoint; + + /// Scalar encode + fn scalar_encode(sc: &ScalarField, buf: &mut Vec); + + /// Scalar decode. + fn scalar_decode(buf: &[u8]) -> ScalarField; +} + +/// Arkworks codec. +/// +/// Little endian. Points flags in MSB. Compression enabled. +pub struct ArkworksCodec; + +impl Codec for ArkworksCodec { + const BIG_ENDIAN: bool = false; + + fn point_encode(pt: &AffinePoint, buf: &mut Vec) { + pt.serialize_compressed(buf).unwrap(); + } + + fn point_decode(buf: &[u8]) -> AffinePoint { + AffinePoint::::deserialize_compressed(buf).unwrap() + } + + fn scalar_encode(sc: &ScalarField, buf: &mut Vec) { + sc.serialize_compressed(buf).unwrap(); + } + + fn scalar_decode(buf: &[u8]) -> ScalarField { + ScalarField::::from_le_bytes_mod_order(buf) + } +} + +/// SEC 1 codec (https://www.secg.org/sec1-v2.pdf) +/// +/// Big endian. Points flags in LSB. Compression enabled. 
+pub struct Sec1Codec;
+
+impl<S: Suite> Codec<S> for Sec1Codec
+where
+    BaseField<S>: ark_ff::PrimeField,
+    CurveConfig<S>: SWCurveConfig,
+    AffinePoint<S>: utils::SWMapping<CurveConfig<S>>,
+{
+    const BIG_ENDIAN: bool = true;
+
+    fn point_encode(pt: &AffinePoint<S>, buf: &mut Vec<u8>) {
+        use ark_ff::biginteger::BigInteger;
+        use utils::SWMapping;
+
+        if pt.is_zero() {
+            buf.push(0x00);
+            return;
+        }
+        let mut tmp = Vec::new();
+        let sw = pt.into_sw();
+
+        let is_odd = sw.y.into_bigint().is_odd();
+        buf.push(if is_odd { 0x03 } else { 0x02 });
+
+        sw.x.serialize_compressed(&mut tmp).unwrap();
+        tmp.reverse();
+        buf.extend_from_slice(&tmp[..]);
+    }
+
+    fn point_decode(buf: &[u8]) -> AffinePoint<S> {
+        use ark_ff::biginteger::BigInteger;
+        use utils::SWMapping;
+        type SWAffine<C> = ark_ec::short_weierstrass::Affine<C>;
+        if buf.len() == 1 && buf[0] == 0x00 {
+            return AffinePoint::<S>::zero();
+        }
+        let mut tmp = buf.to_vec();
+        tmp.reverse();
+        let y_flag = tmp.pop().unwrap();
+
+        let x = BaseField::<S>::deserialize_compressed(&mut &tmp[..]).unwrap();
+        let (y1, y2) = SWAffine::<CurveConfig<S>>::get_ys_from_x_unchecked(x).unwrap();
+        let y = if ((y_flag & 0x01) != 0) == y1.into_bigint().is_odd() {
+            y1
+        } else {
+            y2
+        };
+        let sw = SWAffine::<CurveConfig<S>>::new_unchecked(x, y);
+        AffinePoint::<S>::from_sw(sw)
+    }
+
+    fn scalar_encode(sc: &ScalarField<S>, buf: &mut Vec<u8>) {
+        let mut tmp = Vec::new();
+        sc.serialize_compressed(&mut tmp).unwrap();
+        tmp.reverse();
+        buf.extend_from_slice(&tmp[..]);
+    }
+
+    fn scalar_decode(buf: &[u8]) -> ScalarField<S> {
+        ScalarField::<S>::from_be_bytes_mod_order(buf)
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::testing::{
+        suite::{Public, Secret},
+        TEST_SEED,
+    };
+    use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
+
+    #[test]
+    fn codec_works() {
+        let secret = Secret::from_seed(TEST_SEED);
+
+        let mut buf = Vec::new();
+        secret.serialize_compressed(&mut buf).unwrap();
+        let secret2 = Secret::deserialize_compressed(&mut &buf[..]).unwrap();
+        assert_eq!(secret, secret2);
+
+        let mut buf = Vec::new();
+        let public = secret.public();
+        public.serialize_compressed(&mut buf).unwrap();
+        let public2 = Public::deserialize_compressed(&mut &buf[..]).unwrap();
+        assert_eq!(public, public2);
+    }
+}
diff --git a/src/ietf.rs b/src/ietf.rs
index 80566ff..1c7a6d7 100644
--- a/src/ietf.rs
+++ b/src/ietf.rs
@@ -26,12 +26,17 @@ impl<S: IetfSuite> CanonicalSerialize for Proof<S> {
         mut writer: W,
         _compress_always: ark_serialize::Compress,
     ) -> Result<(), ark_serialize::SerializationError> {
-        let buf = utils::encode_scalar::<S>(&self.c);
-        if buf.len() < S::CHALLENGE_LEN {
+        let c_buf = utils::scalar_encode::<S>(&self.c);
+        if c_buf.len() < S::CHALLENGE_LEN {
             // Encoded scalar length must be at least S::CHALLENGE_LEN
             return Err(ark_serialize::SerializationError::NotEnoughSpace);
         }
-        writer.write_all(&buf[..S::CHALLENGE_LEN])?;
+        let buf = if S::Codec::BIG_ENDIAN {
+            &c_buf[c_buf.len() - S::CHALLENGE_LEN..]
+        } else {
+            &c_buf[..S::CHALLENGE_LEN]
+        };
+        writer.write_all(buf)?;
         self.s.serialize_compressed(&mut writer)?;
         Ok(())
     }
@@ -47,11 +52,11 @@ impl<S: IetfSuite> CanonicalDeserialize for Proof<S> {
         _compress_always: ark_serialize::Compress,
         validate: ark_serialize::Validate,
     ) -> Result<Self, ark_serialize::SerializationError> {
-        let c = <ScalarField<S> as CanonicalDeserialize>::deserialize_with_mode(
-            &mut reader,
-            ark_serialize::Compress::No,
-            validate,
-        )?;
+        let mut c_buf = ark_std::vec![0; S::CHALLENGE_LEN];
+        if reader.read_exact(&mut c_buf[..]).is_err() {
+            return Err(ark_serialize::SerializationError::InvalidData);
+        }
+        let c = utils::scalar_decode::<S>(&c_buf);
         let s = <ScalarField<S> as CanonicalDeserialize>::deserialize_with_mode(
             &mut reader,
             ark_serialize::Compress::No,
@@ -139,8 +144,8 @@ pub mod testing {
 
     impl<S: IetfSuite> core::fmt::Debug for TestVector<S> {
         fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
-            let c = hex::encode(utils::encode_scalar::<S>(&self.c));
-            let s = hex::encode(utils::encode_scalar::<S>(&self.s));
+            let c = hex::encode(utils::scalar_encode::<S>(&self.c));
+            let s = hex::encode(utils::scalar_encode::<S>(&self.s));
             f.debug_struct("TestVector")
                 .field("base", &self.base)
                 .field("proof_c", &c)
@@ -174,15 +179,22 @@ pub mod testing {
 
         fn from_map(map: &common::TestVectorMap) -> Self {
             let base = common::TestVector::from_map(map);
-            let c = utils::decode_scalar::<S>(&map.item_bytes("proof_c"));
-            let s = utils::decode_scalar::<S>(&map.item_bytes("proof_s"));
+            let c = utils::scalar_decode::<S>(&map.item_bytes("proof_c"));
+            let s = utils::scalar_decode::<S>(&map.item_bytes("proof_s"));
             Self { base, c, s }
         }
 
         fn to_map(&self) -> common::TestVectorMap {
+            let buf = utils::scalar_encode::<S>(&self.c);
+            let proof_c = if S::Codec::BIG_ENDIAN {
+                let len = buf.len();
+                &buf[len - S::CHALLENGE_LEN..]
+            } else {
+                &buf[..S::CHALLENGE_LEN]
+            };
             let items = [
-                ("proof_c", hex::encode(utils::encode_scalar::<S>(&self.c))),
-                ("proof_s", hex::encode(utils::encode_scalar::<S>(&self.s))),
+                ("proof_c", hex::encode(proof_c)),
+                ("proof_s", hex::encode(utils::scalar_encode::<S>(&self.s))),
             ];
             let mut map = self.base.to_map();
             items.into_iter().for_each(|(name, value)| {
diff --git a/src/lib.rs b/src/lib.rs
index 02f7225..6eba1b3 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -23,6 +23,7 @@
 use ark_std::vec::Vec;
 use digest::Digest;
 
+pub mod codec;
 pub mod ietf;
 pub mod pedersen;
 pub mod suites;
@@ -51,6 +52,8 @@ pub type CurveConfig<S> = <AffinePoint<S> as AffineRepr>::Config;
 
 pub type HashOutput<S> = digest::Output<<S as Suite>::Hasher>;
 
+pub use codec::Codec;
+
 #[derive(Debug)]
 pub enum Error {
     /// Verification error(s)
@@ -86,6 +89,8 @@ pub trait Suite: Copy + Clone {
     /// Used wherever a hash is required: nonce, challenge, MAC, etc.
     type Hasher: Digest;
 
+    type Codec: codec::Codec<Self>;
+
     /// Nonce generation as described by RFC-9381 section 5.4.2.
     ///
     /// The default implementation provides the variant described
@@ -128,22 +133,6 @@ pub trait Suite: Copy + Clone {
     fn point_to_hash(pt: &AffinePoint<Self>) -> HashOutput<Self> {
         utils::point_to_hash_rfc_9381::<Self>(pt)
     }
-
-    fn point_encode(pt: &AffinePoint<Self>, buf: &mut Vec<u8>) {
-        pt.serialize_compressed(buf).unwrap();
-    }
-
-    fn point_decode(buf: &[u8]) -> AffinePoint<Self> {
-        AffinePoint::<Self>::deserialize_compressed(buf).unwrap()
-    }
-
-    fn scalar_encode(sc: &ScalarField<Self>, buf: &mut Vec<u8>) {
-        sc.serialize_compressed(buf).unwrap();
-    }
-
-    fn scalar_decode(buf: &[u8]) -> ScalarField<Self> {
-        <ScalarField<Self>>::from_le_bytes_mod_order(buf)
-    }
 }
 
 /// Secret key.
@@ -271,26 +260,9 @@ impl<S: Suite> Output<S> {
 mod tests {
     use crate::testing::{
         random_val,
-        suite::{Input, Public, Secret},
+        suite::{Input, Secret},
         TEST_SEED,
     };
-    use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
-
-    #[test]
-    fn codec_works() {
-        let secret = Secret::from_seed(TEST_SEED);
-
-        let mut buf = Vec::new();
-        secret.serialize_compressed(&mut buf).unwrap();
-        let secret2 = Secret::deserialize_compressed(&mut &buf[..]).unwrap();
-        assert_eq!(secret, secret2);
-
-        let mut buf = Vec::new();
-        let public = secret.public();
-        public.serialize_compressed(&mut buf).unwrap();
-        let public2 = Public::deserialize_compressed(&mut &buf[..]).unwrap();
-        assert_eq!(public, public2);
-    }
 
     #[test]
     fn proof_to_hash_works() {
diff --git a/src/pedersen.rs b/src/pedersen.rs
index 209ef42..19c2ee6 100644
--- a/src/pedersen.rs
+++ b/src/pedersen.rs
@@ -198,12 +198,12 @@ pub mod testing {
 
         fn from_map(map: &common::TestVectorMap) -> Self {
             let base = common::TestVector::from_map(map);
-            let blind = utils::decode_scalar::<S>(&map.item_bytes("blinding"));
+            let blind = utils::scalar_decode::<S>(&map.item_bytes("blinding"));
             let pk_blind = utils::decode_point::<S>(&map.item_bytes("proof_pkb"));
             let r = utils::decode_point::<S>(&map.item_bytes("proof_r"));
             let ok = utils::decode_point::<S>(&map.item_bytes("proof_ok"));
-            let s = utils::decode_scalar::<S>(&map.item_bytes("proof_s"));
-            let sb = utils::decode_scalar::<S>(&map.item_bytes("proof_sb"));
+            let s = utils::scalar_decode::<S>(&map.item_bytes("proof_s"));
+            let sb = utils::scalar_decode::<S>(&map.item_bytes("proof_sb"));
             let proof = Proof {
                 pk_blind,
                 r,
@@ -218,7 +218,7 @@ pub mod testing {
             let items = [
                 (
                     "blinding",
-                    hex::encode(utils::encode_scalar::<S>(&self.blind)),
+                    hex::encode(utils::scalar_encode::<S>(&self.blind)),
                 ),
                 (
                     "proof_pkb",
@@ -234,11 +234,11 @@ pub mod testing {
                 ),
                 (
                     "proof_s",
-                    hex::encode(utils::encode_scalar::<S>(&self.proof.s)),
+                    hex::encode(utils::scalar_encode::<S>(&self.proof.s)),
                 ),
                 (
                     "proof_sb",
-                    hex::encode(utils::encode_scalar::<S>(&self.proof.sb)),
+                    hex::encode(utils::scalar_encode::<S>(&self.proof.sb)),
                 ),
             ];
             let mut map = self.base.to_map();
diff --git a/src/ring.rs b/src/ring.rs
index 43763e8..4e7f7d2 100644
--- a/src/ring.rs
+++ b/src/ring.rs
@@ -1,3 +1,4 @@
+use crate::utils::SWMapping;
 use crate::*;
 use ark_ec::short_weierstrass::SWCurveConfig;
 use pedersen::{PedersenSuite, Proof as PedersenProof};
@@ -118,7 +119,7 @@ impl<S: RingSuite> Verifier<S> for Public<S>
 where
     BaseField<S>: ark_ff::PrimeField,
     CurveConfig<S>: SWCurveConfig,
-    AffinePoint<S>: IntoSW<CurveConfig<S>>,
+    AffinePoint<S>: SWMapping<CurveConfig<S>>,
 {
     fn verify(
         input: Input<S>,
@@ -156,7 +157,7 @@ impl<S: RingSuite> RingContext<S>
 where
     BaseField<S>: ark_ff::PrimeField,
     CurveConfig<S>: SWCurveConfig + Clone,
-    AffinePoint<S>: IntoSW<CurveConfig<S>>,
+    AffinePoint<S>: SWMapping<CurveConfig<S>>,
 {
     /// Construct a new ring context suitable to manage the given ring size.
     pub fn from_seed(ring_size: usize, seed: [u8; 32]) -> Self {
@@ -232,7 +233,7 @@ impl<S: RingSuite> CanonicalSerialize for RingContext<S>
 where
     BaseField<S>: ark_ff::PrimeField,
     CurveConfig<S>: SWCurveConfig + Clone,
-    AffinePoint<S>: IntoSW<CurveConfig<S>>,
+    AffinePoint<S>: SWMapping<CurveConfig<S>>,
 {
     fn serialize_with_mode(
         &self,
@@ -252,7 +253,7 @@ impl<S: RingSuite> CanonicalDeserialize for RingContext<S>
 where
     BaseField<S>: ark_ff::PrimeField,
     CurveConfig<S>: SWCurveConfig + Clone,
-    AffinePoint<S>: IntoSW<CurveConfig<S>>,
+    AffinePoint<S>: SWMapping<CurveConfig<S>>,
 {
     fn deserialize_with_mode(
         mut reader: R,
@@ -277,36 +278,18 @@ impl<S: RingSuite> ark_serialize::Valid for RingContext<S>
 where
     BaseField<S>: ark_ff::PrimeField,
     CurveConfig<S>: SWCurveConfig + Clone,
-    AffinePoint<S>: IntoSW<CurveConfig<S>>,
+    AffinePoint<S>: SWMapping<CurveConfig<S>>,
 {
     fn check(&self) -> Result<(), ark_serialize::SerializationError> {
         self.pcs_params.check()
     }
 }
 
-pub trait IntoSW<C: SWCurveConfig> {
-    fn into_sw(self) -> ark_ec::short_weierstrass::Affine<C>;
-}
-
-impl<C: SWCurveConfig> IntoSW<C> for ark_ec::short_weierstrass::Affine<C> {
-    fn into_sw(self) -> ark_ec::short_weierstrass::Affine<C> {
-        self
-    }
-}
-
-impl<C: utils::ark_next::MapConfig> IntoSW<C> for ark_ec::twisted_edwards::Affine<C> {
-    fn into_sw(self) -> ark_ec::short_weierstrass::Affine<C> {
-        const ERR_MSG: &str =
-            "'IntoSW' is expected to be implemented only for curves supporting the mapping";
-        utils::ark_next::map_te_to_sw(&self).expect(ERR_MSG)
-    }
-}
-
 pub(crate) fn make_piop_params<S: RingSuite>(domain_size: usize) -> PiopParams<S>
 where
     BaseField<S>: ark_ff::PrimeField,
     CurveConfig<S>: SWCurveConfig,
-    AffinePoint<S>: IntoSW<CurveConfig<S>>,
+    AffinePoint<S>: SWMapping<CurveConfig<S>>,
 {
     let domain = ring_proof::Domain::new(domain_size, true);
     PiopParams::<S>::setup(
diff --git a/src/suites/bandersnatch.rs b/src/suites/bandersnatch.rs
index ce53e17..975a89f 100644
--- a/src/suites/bandersnatch.rs
+++ b/src/suites/bandersnatch.rs
@@ -70,6 +70,7 @@ pub mod weierstrass {
 
         type Affine = ark_ed_on_bls12_381_bandersnatch::SWAffine;
         type Hasher = sha2::Sha512;
+        type Codec = codec::ArkworksCodec;
     }
 
     impl PedersenSuite for BandersnatchSha512Tai {
@@ -132,6 +133,7 @@ pub mod edwards {
 
         type Affine = ark_ed_on_bls12_381_bandersnatch::EdwardsAffine;
        type Hasher = sha2::Sha512;
+        type Codec = codec::ArkworksCodec;
 
         /// Hash data to a curve point using Elligator2 method described by RFC 9380.
         fn data_to_point(data: &[u8]) -> Option<AffinePoint> {
diff --git a/src/suites/ed25519.rs b/src/suites/ed25519.rs
index 1137cf8..635cad4 100644
--- a/src/suites/ed25519.rs
+++ b/src/suites/ed25519.rs
@@ -62,6 +62,7 @@ impl Suite for Ed25519Sha512Tai {
 
     type Affine = ark_ed25519::EdwardsAffine;
     type Hasher = sha2::Sha512;
+    type Codec = codec::ArkworksCodec;
 }
 
 impl PedersenSuite for Ed25519Sha512Tai {
diff --git a/src/suites/secp256.rs b/src/suites/secp256.rs
index 3736203..eefe321 100644
--- a/src/suites/secp256.rs
+++ b/src/suites/secp256.rs
@@ -65,6 +65,7 @@ impl Suite for P256Sha256Tai {
     type Affine = ark_secp256r1::Affine;
     type Hasher = sha2::Sha256;
+    type Codec = codec::Sec1Codec;
 
     fn nonce(sk: &ScalarField, pt: Input) -> ScalarField {
         utils::nonce_rfc_6979::<Self>(sk, &pt.0)
     }
@@ -73,56 +74,6 @@ impl Suite for P256Sha256Tai {
     fn data_to_point(data: &[u8]) -> Option<AffinePoint> {
         utils::hash_to_curve_tai_rfc_9381::<Self>(data, true)
     }
-
-    /// Encode point according to Section 2.3.3 "SEC 1: Elliptic Curve Cryptography",
-    /// (https://www.secg.org/sec1-v2.pdf) with point compression on.
-    fn point_encode(pt: &AffinePoint, buf: &mut Vec<u8>) {
-        use ark_ff::biginteger::BigInteger;
-        let mut tmp = Vec::new();
-
-        if pt.is_zero() {
-            buf.push(0x00);
-            return;
-        }
-        let is_odd = pt.y.into_bigint().is_odd();
-        buf.push(if is_odd { 0x03 } else { 0x02 });
-
-        pt.x.serialize_compressed(&mut tmp).unwrap();
-        tmp.reverse();
-        buf.extend_from_slice(&tmp[..]);
-    }
-
-    /// Encode point according to Section 2.3.3 "SEC 1: Elliptic Curve Cryptography",
-    /// (https://www.secg.org/sec1-v2.pdf) with point compression on.
-    fn point_decode(buf: &[u8]) -> AffinePoint {
-        use ark_ff::biginteger::BigInteger;
-        if buf.len() == 1 && buf[0] == 0x00 {
-            return AffinePoint::zero();
-        }
-        let mut tmp = buf.to_vec();
-        tmp.reverse();
-        let y_flag = tmp.pop().unwrap();
-
-        let x = BaseField::deserialize_compressed(&mut &tmp[..]).unwrap();
-        let (y1, y2) = AffinePoint::get_ys_from_x_unchecked(x).unwrap();
-        let y = if ((y_flag & 0x01) != 0) == y1.into_bigint().is_odd() {
-            y1
-        } else {
-            y2
-        };
-        AffinePoint::new_unchecked(x, y)
-    }
-
-    fn scalar_encode(sc: &ScalarField, buf: &mut Vec<u8>) {
-        let mut tmp = Vec::new();
-        sc.serialize_compressed(&mut tmp).unwrap();
-        tmp.reverse();
-        buf.extend_from_slice(&tmp[..]);
-    }
-
-    fn scalar_decode(buf: &[u8]) -> ScalarField {
-        ScalarField::from_be_bytes_mod_order(buf)
-    }
 }
 
 impl PedersenSuite for P256Sha256Tai {
diff --git a/src/testing.rs b/src/testing.rs
index 85409c9..f197197 100644
--- a/src/testing.rs
+++ b/src/testing.rs
@@ -19,6 +19,7 @@ pub(crate) mod suite {
 
         type Affine = ark_ed25519::EdwardsAffine;
         type Hasher = sha2::Sha256;
+        type Codec = codec::ArkworksCodec;
     }
 
     suite_types!(TestSuite);
@@ -71,7 +72,7 @@ pub fn ring_prove_verify<S: RingSuite>()
 where
     BaseField<S>: ark_ff::PrimeField,
     CurveConfig<S>: ark_ec::short_weierstrass::SWCurveConfig + Clone,
-    AffinePoint<S>: ring::IntoSW<CurveConfig<S>>,
+    AffinePoint<S>: utils::SWMapping<CurveConfig<S>>,
 {
     use ring::{Prover, RingContext, Verifier};
 
@@ -104,9 +105,9 @@ pub fn check_complement_point<S: RingSuite>()
 where
     BaseField<S>: ark_ff::PrimeField,
     CurveConfig<S>: ark_ec::short_weierstrass::SWCurveConfig + Clone,
-    AffinePoint<S>: ring::IntoSW<CurveConfig<S>>,
+    AffinePoint<S>: utils::SWMapping<CurveConfig<S>>,
 {
-    use ring::IntoSW;
+    use utils::SWMapping;
     let pt = S::COMPLEMENT_POINT.into_sw();
     assert!(pt.is_on_curve());
     assert!(!pt.is_in_correct_subgroup_assuming_on_curve());
@@ -151,7 +152,7 @@ macro_rules! ring_suite_tests {
 
 impl<S: Suite> core::fmt::Debug for TestVector<S> {
     fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
-        let sk = hex::encode(utils::encode_scalar::<S>(&self.sk));
+        let sk = hex::encode(utils::scalar_encode::<S>(&self.sk));
         let pk = hex::encode(utils::encode_point::<S>(&self.pk));
         let alpha = hex::encode(&self.alpha);
         let ad = hex::encode(&self.ad);
@@ -254,7 +255,7 @@ impl<S: Suite> TestVectorTrait for TestVector<S> {
         let item_bytes = |field| hex::decode(map.0.get(field).unwrap()).unwrap();
         let comment = map.0.get("comment").unwrap().to_string();
         let flags = item_bytes("flags")[0];
-        let sk = utils::decode_scalar::<S>(&item_bytes("sk"));
+        let sk = utils::scalar_decode::<S>(&item_bytes("sk"));
         let pk = utils::decode_point::<S>(&item_bytes("pk"));
         let alpha = item_bytes("alpha");
         let ad = item_bytes("ad");
@@ -278,7 +279,7 @@ impl<S: Suite> TestVectorTrait for TestVector<S> {
         let items = [
             ("comment", self.comment.clone()),
             ("flags", hex::encode([self.flags])),
-            ("sk", hex::encode(utils::encode_scalar::<S>(&self.sk))),
+            ("sk", hex::encode(utils::scalar_encode::<S>(&self.sk))),
             ("pk", hex::encode(utils::encode_point::<S>(&self.pk))),
             ("alpha", hex::encode(&self.alpha)),
             ("ad", hex::encode(&self.ad)),
diff --git a/src/utils.rs b/src/utils.rs
index 916b28c..658e3ac 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -1,4 +1,4 @@
-use crate::{AffinePoint, HashOutput, ScalarField, Suite};
+use crate::{AffinePoint, Codec, HashOutput, ScalarField, Suite};
 use ark_ec::AffineRepr;
 use ark_ff::PrimeField;
 
@@ -149,7 +149,7 @@ pub fn challenge_rfc_9381<S: Suite>(pts: &[&AffinePoint<S>], ad: &[u8]) -> Scala
     const DOM_SEP_END: u8 = 0x00;
     let mut buf = [S::SUITE_ID, &[DOM_SEP_START]].concat();
     pts.iter().for_each(|p| {
-        S::point_encode(p, &mut buf);
+        S::Codec::point_encode(p, &mut buf);
     });
     buf.extend_from_slice(ad);
     buf.push(DOM_SEP_END);
@@ -162,7 +162,7 @@ pub fn point_to_hash_rfc_9381<S: Suite>(pt: &AffinePoint<S>) -> HashOutput<S> {
     const DOM_SEP_START: u8 = 0x03;
     const DOM_SEP_END: u8 = 0x00;
     let mut buf = [S::SUITE_ID, &[DOM_SEP_START]].concat();
-    S::point_encode(pt, &mut buf);
+    S::Codec::point_encode(pt, &mut buf);
     buf.push(DOM_SEP_END);
     hash::<S>(&buf)
 }
@@ -181,14 +181,14 @@
 ///
 /// This function panics if `Hash` is less than 32 bytes.
 pub fn nonce_rfc_8032<S: Suite>(sk: &ScalarField<S>, input: &AffinePoint<S>) -> ScalarField<S> {
-    let raw = encode_scalar::<S>(sk);
+    let raw = scalar_encode::<S>(sk);
     let sk_hash = &hash::<S>(&raw)[32..];
 
     let raw = encode_point::<S>(input);
     let v = [sk_hash, &raw[..]].concat();
     let h = &hash::<S>(&v)[..];
 
-    S::scalar_decode(h)
+    S::Codec::scalar_decode(h)
 }
 
 /// Nonce generation according to RFC 9381 section 5.4.2.1.
@@ -197,8 +197,7 @@
 /// the Digital Signature Algorithm (DSA) and Elliptic Curve Digital Signature
 /// Algorithm (ECDSA)".
 ///
-/// The algorithm generate the nonce value in a deterministic
-/// pseudorandom fashion.
+/// The algorithm generates the nonce value in a deterministic pseudorandom fashion.
#[cfg(feature = "rfc-6979")] pub fn nonce_rfc_6979(sk: &ScalarField, input: &AffinePoint) -> ScalarField where @@ -211,7 +210,7 @@ where let k = [0; 32]; // K = HMAC_K(V || 0x00 || int2octets(x) || bits2octets(h1)) - let x = encode_scalar::(sk); + let x = scalar_encode::(sk); let raw = [&v[..], &[0x00], &x[..], &h1[..]].concat(); let k = hmac::(&k, &raw); @@ -228,27 +227,31 @@ where // TODO: loop until 1 < k < q let v = hmac::(&k, &v); - S::scalar_decode(&v) + S::Codec::scalar_decode(&v) } +/// Point encoder wrapper using `Suite::Codec`. pub fn encode_point(pt: &AffinePoint) -> Vec { let mut buf = Vec::new(); - S::point_encode(pt, &mut buf); + S::Codec::point_encode(pt, &mut buf); buf } +/// Point decoder wrapper using `Suite::Codec`. pub fn decode_point(buf: &[u8]) -> AffinePoint { - S::point_decode(buf) + S::Codec::point_decode(buf) } -pub fn encode_scalar(sc: &ScalarField) -> Vec { +/// Scalar encoder wrapper using `Suite::Codec`. +pub fn scalar_encode(sc: &ScalarField) -> Vec { let mut buf = Vec::new(); - S::scalar_encode(sc, &mut buf); + S::Codec::scalar_encode(sc, &mut buf); buf } -pub fn decode_scalar(buf: &[u8]) -> ScalarField { - S::scalar_decode(buf) +/// Scalar decoder wrapper using `Suite::Codec`. +pub fn scalar_decode(buf: &[u8]) -> ScalarField { + S::Codec::scalar_decode(buf) } // Upcoming Arkworks features. @@ -304,6 +307,41 @@ pub(crate) mod ark_next { } } +pub trait SWMapping { + fn from_sw(sw: ark_ec::short_weierstrass::Affine) -> Self; + fn into_sw(self) -> ark_ec::short_weierstrass::Affine; +} + +impl SWMapping + for ark_ec::short_weierstrass::Affine +{ + #[inline(always)] + fn from_sw(sw: ark_ec::short_weierstrass::Affine) -> Self { + sw + } + + #[inline(always)] + fn into_sw(self) -> ark_ec::short_weierstrass::Affine { + self + } +} + +impl SWMapping for ark_ec::twisted_edwards::Affine { + #[inline(always)] + fn from_sw(sw: ark_ec::short_weierstrass::Affine) -> Self { + const ERR_MSG: &str = + "SW to TE is expected to be implemented only for curves supporting the mapping"; + ark_next::map_sw_to_te(&sw).expect(ERR_MSG) + } + + #[inline(always)] + fn into_sw(self) -> ark_ec::short_weierstrass::Affine { + const ERR_MSG: &str = + "TE to SW is expected to be implemented only for curves supporting the mapping"; + ark_next::map_te_to_sw(&self).expect(ERR_MSG) + } +} + #[cfg(test)] mod tests { use super::*;