Codec trait (#22)
Two implementations are currently provided:
- Arkworks
- SEC1

Also fixes serialization of the IETF proof when Suite::CHALLENGE_LEN is smaller than scalar.serialized_size().
davxy authored Jul 8, 2024
1 parent c922816 commit d466265
Showing 14 changed files with 270 additions and 167 deletions.
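
The heart of the fix is how the challenge scalar is cut down to Suite::CHALLENGE_LEN bytes in a way that respects the codec's byte order (see the src/ietf.rs hunks below). The following standalone Rust sketch, using a hypothetical helper name that is not part of the crate, illustrates the slicing rule:

/// Hypothetical helper (illustration only, not the crate's API): keep the
/// CHALLENGE_LEN least-significant bytes of a serialized scalar, honoring
/// the codec's byte order.
fn truncate_challenge(encoded: &[u8], challenge_len: usize, big_endian: bool) -> &[u8] {
    assert!(encoded.len() >= challenge_len);
    if big_endian {
        // Big-endian codecs (e.g. SEC1): the low-order bytes sit at the end.
        &encoded[encoded.len() - challenge_len..]
    } else {
        // Little-endian codecs (e.g. Arkworks): the low-order bytes come first.
        &encoded[..challenge_len]
    }
}

fn main() {
    let scalar = [0u8; 32]; // stand-in for a 32-byte serialized scalar
    assert_eq!(truncate_challenge(&scalar, 16, true).len(), 16);
    assert_eq!(truncate_challenge(&scalar, 16, false).len(), 16);
}

This matches the regenerated vectors below: the ed25519 suite (little-endian codec) drops the trailing zero padding from proof_c, while the secp256 suite (big-endian SEC 1 codec) drops the leading zeros.
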
12 changes: 6 additions & 6 deletions data/bandersnatch_sw_sha512_tai_ietf_vectors.json
@@ -1,6 +1,6 @@
[
{
"comment": "Bandersnatch_SW_SHA-512_ELL2 - vector-1",
"comment": "Bandersnatch_SW_SHA-512_TAI - vector-1",
"flags": "00",
"sk": "2bd8776e6ca6a43d51987f756be88b643ab4431b523132f675c8f0004f5d5a17",
"pk": "f55a48e6befa22dc42007ec6d5bf83620d8f794901f42b18c10a4f7a6176985280",
@@ -13,7 +13,7 @@
"proof_s": "c05e6568ece7e8409f86962e48594546fcaf7d338ca9938c77db863b1d294f11"
},
{
"comment": "Bandersnatch_SW_SHA-512_ELL2 - vector-2",
"comment": "Bandersnatch_SW_SHA-512_TAI - vector-2",
"flags": "00",
"sk": "3d6406500d4009fdf2604546093665911e753f2213570a29521fd88bc30ede18",
"pk": "fd72a90d1eeba6733824e76bb31991b8108d6562756b85f244333e3c7205225200",
@@ -26,7 +26,7 @@
"proof_s": "56c0c71dc0250b3b8e51b66e8c2794e76974840f0d3176db4bb2207cf080db1c"
},
{
"comment": "Bandersnatch_SW_SHA-512_ELL2 - vector-3",
"comment": "Bandersnatch_SW_SHA-512_TAI - vector-3",
"flags": "00",
"sk": "8b9063872331dda4c3c282f7d813fb3c13e7339b7dc9635fdc764e32cc57cb15",
"pk": "e30eae606d21dff460cdaecfc9bfcd2e319628ccc0242f3ca21f2d5c940ba41680",
@@ -39,7 +39,7 @@
"proof_s": "70a744c46a4e72826de234deebcb6e826e23e4375f5233fd78ed0a4353b6c508"
},
{
"comment": "Bandersnatch_SW_SHA-512_ELL2 - vector-4",
"comment": "Bandersnatch_SW_SHA-512_TAI - vector-4",
"flags": "00",
"sk": "6db187202f69e627e432296ae1d0f166ae6ac3c1222585b6ceae80ea07670b14",
"pk": "2a00e5a32e2f097858a1a4a73cf5c2fb4e6d375a4ea4cc3ae3e91660eade850c80",
@@ -52,7 +52,7 @@
"proof_s": "85c6fc7354bd6a2e52aa5b5e25269d9cba3c21b34f8f80c596a56f62d8fa911c"
},
{
"comment": "Bandersnatch_SW_SHA-512_ELL2 - vector-5",
"comment": "Bandersnatch_SW_SHA-512_TAI - vector-5",
"flags": "00",
"sk": "b56cc204f1b6c2323709012cb16c72f3021035ce935fbe69b600a88d842c7407",
"pk": "4f29d79a27b9545d7223431eb6a63776949454b16e2ac0b7a959304ce3e52b6a00",
@@ -65,7 +65,7 @@
"proof_s": "a228daebff3f0c261bd6382d4e0be4f043ee687704a6596f72c626bc52b31418"
},
{
"comment": "Bandersnatch_SW_SHA-512_ELL2 - vector-6",
"comment": "Bandersnatch_SW_SHA-512_TAI - vector-6",
"flags": "00",
"sk": "da36359bf1bfd1694d3ed359e7340bd02a6a5e54827d94db1384df29f5bdd302",
"pk": "e58e8ba2e99035fb7ae11fa14e2a609d6d13679278dac63ebee64ca8612ffa1480",
12 changes: 6 additions & 6 deletions data/ed25519_sha512_tai_ietf_vectors.json
@@ -9,7 +9,7 @@
"h": "5e1dbdda4ce6a06e9e1062bf1c5d2ff6dd2689ba99a9b4c57e86bc8c9953e685",
"gamma": "bfc0b9986b3732c37543cd5f66830a2c0d5005ee5d1a075c5ac6504c7a25fc1d",
"beta": "1ce87ebb1710c3b55ff9c72e1d7e6f852bdbc4e793aa2ff1ceea1c698ca9b5b10709598d6c35b32e7caeecdb26e6d6f18199bc1545c3789fc30ef167ff8b51a5",
"proof_c": "ade440cd841f6432cf98f73e121d5a6e00000000000000000000000000000000",
"proof_c": "ade440cd841f6432cf98f73e121d5a6e",
"proof_s": "452e5cc6afe527b077a8e1c3602873d890cbf729b67ae43b8cb651a1f9164a0d"
},
{
@@ -22,7 +22,7 @@
"h": "a8b789c2545f3fbeb1f801136462a7734ff39a37d7352cc8d0b1d5bf2db54a08",
"gamma": "9aeb013867130f00b431f30038d5054dfcd29b45bb2402d6f4f6b65225504eec",
"beta": "ad37782159dce2a4674a2fa5e411df74c1bd4a59a119271252c459a6cb0a58d281705d0a0256a2a941a369f9f1d71dea0c1c7d3bf069cfd6edf92a267f8ed3f3",
"proof_c": "abd9d0175ef80e70c18d5f70f1a7d85900000000000000000000000000000000",
"proof_c": "abd9d0175ef80e70c18d5f70f1a7d859",
"proof_s": "3f5b2196bd1f7ea04323d2325fbf876884cbdcc8b83551af5a4ffbb46478d30f"
},
{
@@ -35,7 +35,7 @@
"h": "b2f60d2cdd4e97a5418b383abdea63c57aab609fa05f579c43374bf008e23964",
"gamma": "d0bd8c705293879daed5264dd2c4b129a941db2e9513544a17100ec4634dfd46",
"beta": "45e62e4c45a49aafc014042de5a870824326641d156bca99f894b2e8a0f44193f27ef85500a87a8ac023d66e4f70bb8b1bbb0b3d0b25eb5093ed99b3122fa1f4",
"proof_c": "9f4b10e5c86351368c14360e179fd44b00000000000000000000000000000000",
"proof_c": "9f4b10e5c86351368c14360e179fd44b",
"proof_s": "925a654de7718fe44b7742cf1804447322ba4f1d861d67d5f73a9f040eb9540f"
},
{
@@ -48,7 +48,7 @@
"h": "69567baa9dc496ef24aafb2da474ade1e1202f061e413ea33e0877dd8db085c7",
"gamma": "be9ce3cd0b2ee84de50ce670ae9673ab3b5dc8aaffc7474415e09699f7378155",
"beta": "c1e4361d7314984ce6b2d21fe4c27d12413fb5364e6e3be6894a3d5ad5c2f3783a03efc6908b203758eab6264cb03f1cdee98aa1ebe810f60e97f86cc27d221e",
"proof_c": "0d57ba5ec304a522f2f312f49613cd3800000000000000000000000000000000",
"proof_c": "0d57ba5ec304a522f2f312f49613cd38",
"proof_s": "1f748f63f83197bf22e67aa850c1301187cbf50cb40fcd305910e340dd5e140d"
},
{
@@ -61,7 +61,7 @@
"h": "71d02b885f6a81bb31d52c5bc54d8ba1c2fc52c42e34ddfd026e669caa30ca9e",
"gamma": "032de270c2fba46c5345d09b039c389e826deb4083bba4a0e343518b1377c80f",
"beta": "7ea607c45bc373ac948f334bb8d392b17f5156cf19330bab7a2572eaf7fcaa48fff48847d88a20659de03b2066ad8449d9c1fc3c178a6f3b074c33f7a74d0fde",
"proof_c": "666c44103979c11069521d4ba44e52fa00000000000000000000000000000000",
"proof_c": "666c44103979c11069521d4ba44e52fa",
"proof_s": "f99e18b0bf2ec11c6c33e8bde9758af2d07e38ba2848b6d5eb3462d156630009"
},
{
@@ -74,7 +74,7 @@
"h": "bc89b34d01bda6e9115462584ed1f5554e9035ea4b4b77d27dbd3027c16dfa43",
"gamma": "50901ea735820db2198b6b6a8c8b62abdbc7bedde6b88a30e439583f1e387103",
"beta": "4dc1285faf82ddd0b0b60f6055051d3f501e0b55b57d5f440467eea15dce84e56b20dca9462c11e57439b558b557f09a5ae8fbafea3f2d477f8f9aad10f19fc3",
"proof_c": "127dcc8f1a99979f5e4386f31d0af2f400000000000000000000000000000000",
"proof_c": "127dcc8f1a99979f5e4386f31d0af2f4",
"proof_s": "da688770c94d9fda341f68294beeb746f7f796fa801b07666b1b1039bd250505"
}
]
12 changes: 6 additions & 6 deletions data/secp256_sha256_tai_ietf_vectors.json
@@ -9,7 +9,7 @@
"h": "02ce0d7db4edd23b99b3736443c53ad12b133266c45646be3902a854606672e6ef",
"gamma": "0298b844148e41d128a2f2d49eb208618039cd9ece62d0fc2659461ebe2c7df2ac",
"beta": "92fd1eff9ddb4173ee87c45476b1c43486da7c20efd7a3c5b88891cf99bcd680",
"proof_c": "000000000000000000000000000000007b331610072248bd15c7aef0832ae2f9",
"proof_c": "7b331610072248bd15c7aef0832ae2f9",
"proof_s": "d30c63494b5e087f26bca7e3b57dd4876f6d01597e9ac272f33c7ac48f11a5b8"
},
{
@@ -22,7 +22,7 @@
"h": "0242336b14ccf0f101b64a1cca4b08f72f6be851f717dc56c07262500a84071e06",
"gamma": "03b2ae579ccf850038d9ba6b94a87646e8ecdec789639aab4c1492f11b114e6515",
"beta": "f30b3d4fb257c287cf423d044796ce1eaf6ec9c9d42ae4305274dad9a2fbd4dd",
"proof_c": "00000000000000000000000000000000230bcb8e6ff67f15e2aa27a7e570e743",
"proof_c": "230bcb8e6ff67f15e2aa27a7e570e743",
"proof_s": "8fdd6585e6ca4e09ada872c09c2ccacaa60df0cd919b10c4e2a68285ba236105"
},
{
@@ -35,7 +35,7 @@
"h": "03b90c05c10ddbfdafda6a964012946641737f888b90c930abdd2454c568f73d70",
"gamma": "03c572767b190538c104182dfac985cd58430074f0db24305f4006d896c93b96c7",
"beta": "d06d1139d2f0199b36d48b8d4980421e3d98504442e475c1760dcbbd19d27543",
"proof_c": "000000000000000000000000000000004b5b11f3b418095a5d02f835a283cc99",
"proof_c": "4b5b11f3b418095a5d02f835a283cc99",
"proof_s": "78bd588629d7dbfa7909384fc55590443f7d72996655ca933bf5979ccf342e14"
},
{
@@ -48,7 +48,7 @@
"h": "02bca955248372984158e0f9bf7d8d9bb1ee87f188cf27e93647a967a0f9b09535",
"gamma": "0323c860bdba4ceccc28ebed9283dc6f74b4e16c94ffe566f49318b955458c20e3",
"beta": "cd9444669432a6b5ca8ad3a4adcd8152fb8a76e327877ea876e6bf3e761b6e13",
"proof_c": "00000000000000000000000000000000c209c08df735f0a15e6318fcb7158693",
"proof_c": "c209c08df735f0a15e6318fcb7158693",
"proof_s": "9db4909d8cab16e9e79004fc74fc66eb3560debbad9aaa01f61e4077f893f1f7"
},
{
@@ -61,7 +61,7 @@
"h": "039edace829d35ef117e135c8b81b5b00a0d3c9f24a349761fc07a7503bf048966",
"gamma": "02d7b9b206af6fae3a6225ef09701e965e5387a22eca8e1bc71c167cd53f66903a",
"beta": "0f4965be7ec48d075a0035d5d1874a05328cb9f1cbe6b4066813ebb38e46b101",
"proof_c": "00000000000000000000000000000000ebc773f07f5a559bf176f57f83d5d80d",
"proof_c": "ebc773f07f5a559bf176f57f83d5d80d",
"proof_s": "e0f386d45474ea6eb40ebf59e4c7e6695813b6bf4b7984db868e415d1d3432e9"
},
{
@@ -74,7 +74,7 @@
"h": "029d367ea8eaf47bdb453f836c0c6afe9e5540c33adf4b20e1ecdf96c45f07ed1f",
"gamma": "0288734131a4dddb3a69e0ff346bd6afa5fcf29a328f1a4f7fa00c512127960e33",
"beta": "0a53ce69bc69cf00fc7f42a74717d8271fb6f7124351697e64e006e1e77733ba",
"proof_c": "000000000000000000000000000000007a1041d32a0e043ab11822977b6108b9",
"proof_c": "7a1041d32a0e043ab11822977b6108b9",
"proof_s": "b46c510a1bd8605aaf76d61407ea94a1158244b9b513a1653dfd347e2ea33847"
}
]
7 changes: 7 additions & 0 deletions data/vectors-generate.sh
@@ -0,0 +1,7 @@
#!/usr/bin/env bash

cargo test \
--release \
--features full \
-- \
--ignored
136 changes: 136 additions & 0 deletions src/codec.rs
@@ -0,0 +1,136 @@
use ark_ec::short_weierstrass::SWCurveConfig;

use super::*;

/// Defines points and scalars encoding format.
pub trait Codec<S: Suite> {
    const BIG_ENDIAN: bool;

    /// Point encode.
    fn point_encode(pt: &AffinePoint<S>, buf: &mut Vec<u8>);

    /// Point decode.
    fn point_decode(buf: &[u8]) -> AffinePoint<S>;

    /// Scalar encode
    fn scalar_encode(sc: &ScalarField<S>, buf: &mut Vec<u8>);

    /// Scalar decode.
    fn scalar_decode(buf: &[u8]) -> ScalarField<S>;
}

/// Arkworks codec.
///
/// Little endian. Points flags in MSB. Compression enabled.
pub struct ArkworksCodec;

impl<S: Suite> Codec<S> for ArkworksCodec {
    const BIG_ENDIAN: bool = false;

    fn point_encode(pt: &AffinePoint<S>, buf: &mut Vec<u8>) {
        pt.serialize_compressed(buf).unwrap();
    }

    fn point_decode(buf: &[u8]) -> AffinePoint<S> {
        AffinePoint::<S>::deserialize_compressed(buf).unwrap()
    }

    fn scalar_encode(sc: &ScalarField<S>, buf: &mut Vec<u8>) {
        sc.serialize_compressed(buf).unwrap();
    }

    fn scalar_decode(buf: &[u8]) -> ScalarField<S> {
        ScalarField::<S>::from_le_bytes_mod_order(buf)
    }
}

/// SEC 1 codec (https://www.secg.org/sec1-v2.pdf)
///
/// Big endian. Points flags in LSB. Compression enabled.
pub struct Sec1Codec;

impl<S: Suite> Codec<S> for Sec1Codec
where
    BaseField<S>: ark_ff::PrimeField,
    CurveConfig<S>: SWCurveConfig,
    AffinePoint<S>: utils::SWMapping<CurveConfig<S>>,
{
    const BIG_ENDIAN: bool = true;

    fn point_encode(pt: &AffinePoint<S>, buf: &mut Vec<u8>) {
        use ark_ff::biginteger::BigInteger;
        use utils::SWMapping;

        if pt.is_zero() {
            buf.push(0x00);
            return;
        }
        let mut tmp = Vec::new();
        let sw = pt.into_sw();

        let is_odd = sw.y.into_bigint().is_odd();
        buf.push(if is_odd { 0x03 } else { 0x02 });

        sw.x.serialize_compressed(&mut tmp).unwrap();
        tmp.reverse();
        buf.extend_from_slice(&tmp[..]);
    }

    fn point_decode(buf: &[u8]) -> AffinePoint<S> {
        use ark_ff::biginteger::BigInteger;
        use utils::SWMapping;
        type SWAffine<C> = ark_ec::short_weierstrass::Affine<C>;
        if buf.len() == 1 && buf[0] == 0x00 {
            return AffinePoint::<S>::zero();
        }
        let mut tmp = buf.to_vec();
        tmp.reverse();
        let y_flag = tmp.pop().unwrap();

        let x = BaseField::<S>::deserialize_compressed(&mut &tmp[..]).unwrap();
        let (y1, y2) = SWAffine::<CurveConfig<S>>::get_ys_from_x_unchecked(x).unwrap();
        let y = if ((y_flag & 0x01) != 0) == y1.into_bigint().is_odd() {
            y1
        } else {
            y2
        };
        let sw = SWAffine::<CurveConfig<S>>::new_unchecked(x, y);
        AffinePoint::<S>::from_sw(sw)
    }

    fn scalar_encode(sc: &ScalarField<S>, buf: &mut Vec<u8>) {
        let mut tmp = Vec::new();
        sc.serialize_compressed(&mut tmp).unwrap();
        tmp.reverse();
        buf.extend_from_slice(&tmp[..]);
    }

    fn scalar_decode(buf: &[u8]) -> ScalarField<S> {
        ScalarField::<S>::from_be_bytes_mod_order(buf)
    }
}

#[cfg(test)]
mod tests {
    use crate::testing::{
        suite::{Public, Secret},
        TEST_SEED,
    };
    use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};

    #[test]
    fn codec_works() {
        let secret = Secret::from_seed(TEST_SEED);

        let mut buf = Vec::new();
        secret.serialize_compressed(&mut buf).unwrap();
        let secret2 = Secret::deserialize_compressed(&mut &buf[..]).unwrap();
        assert_eq!(secret, secret2);

        let mut buf = Vec::new();
        let public = secret.public();
        public.serialize_compressed(&mut buf).unwrap();
        let public2 = Public::deserialize_compressed(&mut &buf[..]).unwrap();
        assert_eq!(public, public2);
    }
}
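
For readers unfamiliar with SEC 1 point compression, the encoding produced by Sec1Codec::point_encode above is a single tag byte (0x00 for the identity, 0x02 for even y, 0x03 for odd y) followed by the x coordinate serialized big-endian. A minimal standalone sketch of that framing, with a hypothetical helper name and plain byte strings standing in for field elements:

// Illustration only: x_be is the x coordinate already serialized big-endian,
// y_is_odd is the parity of y; field arithmetic is out of scope here.
fn sec1_compress(x_be: &[u8], y_is_odd: bool, is_identity: bool) -> Vec<u8> {
    if is_identity {
        return vec![0x00];
    }
    let mut out = Vec::with_capacity(1 + x_be.len());
    out.push(if y_is_odd { 0x03 } else { 0x02 });
    out.extend_from_slice(x_be);
    out
}

fn main() {
    let x = [0x11u8; 32]; // stand-in for a 32-byte big-endian x coordinate
    assert_eq!(sec1_compress(&x, true, false)[0], 0x03);
    assert_eq!(sec1_compress(&x, false, false).len(), 33);
    assert_eq!(sec1_compress(&[], false, true), vec![0x00]);
}

Decoding reverses the framing: pop the tag byte, recover both candidate y values from x, and keep the one whose parity matches the tag, which is what Sec1Codec::point_decode does via get_ys_from_x_unchecked.
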
40 changes: 26 additions & 14 deletions src/ietf.rs
@@ -26,12 +26,17 @@ impl<S: IetfSuite> CanonicalSerialize for Proof<S> {
         mut writer: W,
         _compress_always: ark_serialize::Compress,
     ) -> Result<(), ark_serialize::SerializationError> {
-        let buf = utils::encode_scalar::<S>(&self.c);
-        if buf.len() < S::CHALLENGE_LEN {
+        let c_buf = utils::scalar_encode::<S>(&self.c);
+        if c_buf.len() < S::CHALLENGE_LEN {
             // Encoded scalar length must be at least S::CHALLENGE_LEN
             return Err(ark_serialize::SerializationError::NotEnoughSpace);
         }
-        writer.write_all(&buf[..S::CHALLENGE_LEN])?;
+        let buf = if S::Codec::BIG_ENDIAN {
+            &c_buf[c_buf.len() - S::CHALLENGE_LEN..]
+        } else {
+            &c_buf[..S::CHALLENGE_LEN]
+        };
+        writer.write_all(buf)?;
         self.s.serialize_compressed(&mut writer)?;
         Ok(())
     }
@@ -47,11 +52,11 @@ impl<S: IetfSuite> CanonicalDeserialize for Proof<S> {
         _compress_always: ark_serialize::Compress,
         validate: ark_serialize::Validate,
     ) -> Result<Self, ark_serialize::SerializationError> {
-        let c = <ScalarField<S> as CanonicalDeserialize>::deserialize_with_mode(
-            &mut reader,
-            ark_serialize::Compress::No,
-            validate,
-        )?;
+        let mut c_buf = ark_std::vec![0; S::CHALLENGE_LEN];
+        if reader.read_exact(&mut c_buf[..]).is_err() {
+            return Err(ark_serialize::SerializationError::InvalidData);
+        }
+        let c = utils::scalar_decode::<S>(&c_buf);
         let s = <ScalarField<S> as CanonicalDeserialize>::deserialize_with_mode(
             &mut reader,
             ark_serialize::Compress::No,
@@ -139,8 +144,8 @@ pub mod testing {

     impl<S: IetfSuite> core::fmt::Debug for TestVector<S> {
         fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
-            let c = hex::encode(utils::encode_scalar::<S>(&self.c));
-            let s = hex::encode(utils::encode_scalar::<S>(&self.s));
+            let c = hex::encode(utils::scalar_encode::<S>(&self.c));
+            let s = hex::encode(utils::scalar_encode::<S>(&self.s));
             f.debug_struct("TestVector")
                 .field("base", &self.base)
                 .field("proof_c", &c)
@@ -174,15 +179,22 @@ pub mod testing {

         fn from_map(map: &common::TestVectorMap) -> Self {
             let base = common::TestVector::from_map(map);
-            let c = utils::decode_scalar::<S>(&map.item_bytes("proof_c"));
-            let s = utils::decode_scalar::<S>(&map.item_bytes("proof_s"));
+            let c = utils::scalar_decode::<S>(&map.item_bytes("proof_c"));
+            let s = utils::scalar_decode::<S>(&map.item_bytes("proof_s"));
             Self { base, c, s }
         }

         fn to_map(&self) -> common::TestVectorMap {
+            let buf = utils::scalar_encode::<S>(&self.c);
+            let proof_c = if S::Codec::BIG_ENDIAN {
+                let len = buf.len();
+                &buf[len - S::CHALLENGE_LEN..]
+            } else {
+                &buf[..S::CHALLENGE_LEN]
+            };
             let items = [
-                ("proof_c", hex::encode(utils::encode_scalar::<S>(&self.c))),
-                ("proof_s", hex::encode(utils::encode_scalar::<S>(&self.s))),
+                ("proof_c", hex::encode(proof_c)),
+                ("proof_s", hex::encode(utils::scalar_encode::<S>(&self.s))),
             ];
             let mut map = self.base.to_map();
             items.into_iter().for_each(|(name, value)| {