Skip to content
This repository was archived by the owner on Dec 18, 2023. It is now read-only.

Commit 7e4b4bf

Browse files
Non-canonical infinity point & bad flags in BLS12-381 serialization should fail (#176)
Co-authored-by: Kevaundray Wedderburn <kevtheappdev@gmail.com>
1 parent 4d1e504 commit 7e4b4bf

File tree

3 files changed

+128
-31
lines changed

3 files changed

+128
-31
lines changed

CHANGELOG.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,8 @@
1414

1515
### Bugfixes
1616

17+
- [\#176](https://github.com/arkworks-rs/curves/pull/176) Non-canonical infinity point and bad flags in BLS12-381 serialization should fail.
18+
1719
## v0.4.0
1820
- [\#76](https://github.com/arkworks-rs/curves/pull/76) twisted Edwards parameters for bls12-377
1921
- Fixed curve benches

bls12_381/src/curves/g1.rs

Lines changed: 52 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -180,6 +180,7 @@ mod test {
180180

181181
use super::*;
182182
use crate::g1;
183+
use ark_serialize::CanonicalDeserialize;
183184
use ark_std::{rand::Rng, UniformRand};
184185

185186
fn sample_unchecked() -> Affine<g1::Config> {
@@ -204,4 +205,55 @@ mod test {
204205
assert!(p.is_in_correct_subgroup_assuming_on_curve());
205206
}
206207
}
208+
209+
#[test]
210+
fn non_canonical_identity_point() {
211+
let non_canonical_hex = "c01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000";
212+
let non_canonical_bytes = hex::decode(non_canonical_hex).unwrap();
213+
assert_eq!(non_canonical_bytes.len(), 48);
214+
215+
let maybe_affine_point: Result<G1Affine, ark_serialize::SerializationError> =
216+
CanonicalDeserialize::deserialize_compressed(&non_canonical_bytes[..]);
217+
218+
assert!(maybe_affine_point.is_err());
219+
220+
let non_canonical_hex_uncompressed = "c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001";
221+
let non_canonical_bytes = hex::decode(non_canonical_hex_uncompressed).unwrap();
222+
assert_eq!(non_canonical_bytes.len(), 96);
223+
224+
let maybe_affine_point: Result<G1Affine, ark_serialize::SerializationError> =
225+
CanonicalDeserialize::deserialize_uncompressed(&non_canonical_bytes[..]);
226+
227+
assert!(maybe_affine_point.is_err())
228+
}
229+
230+
#[test]
231+
fn bad_flag_combination() {
232+
// See https://github.com/zkcrypto/pairing/tree/fa8103764a07bd273927447d434de18aace252d3/src/bls12_381#serialization
233+
// - Bit 1 is compressed/uncompressed
234+
// - Bit 2 is infinity
235+
// - Bit 3 is lexicographical order for compressed point deserialization
236+
// Hence `0b1110` ("e" in hex) or `0b0110` ("6" in hex) are both nonsensical.
237+
238+
// uncompressed, but lexicographically largest flag is set
239+
let non_canonical_hex = "600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000";
240+
let non_canonical_bytes = hex::decode(non_canonical_hex).unwrap();
241+
assert_eq!(non_canonical_bytes.len(), 48);
242+
243+
let maybe_affine_point: Result<G1Affine, ark_serialize::SerializationError> =
244+
CanonicalDeserialize::deserialize_compressed(&non_canonical_bytes[..]);
245+
246+
assert!(maybe_affine_point.is_err());
247+
248+
// compressed, but infinity flag is set and lexicographically largest flag is
249+
// set
250+
let non_canonical_hex_2 = "e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000";
251+
252+
let non_canonical_bytes = hex::decode(non_canonical_hex_2).unwrap();
253+
assert_eq!(non_canonical_bytes.len(), 48);
254+
255+
let maybe_affine_point: Result<G1Affine, ark_serialize::SerializationError> =
256+
CanonicalDeserialize::deserialize_compressed(&non_canonical_bytes[..]);
257+
assert!(maybe_affine_point.is_err());
258+
}
207259
}

bls12_381/src/curves/util.rs

Lines changed: 74 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -14,17 +14,28 @@ pub struct EncodingFlags {
1414
}
1515

1616
impl EncodingFlags {
17-
pub fn get_flags(bytes: &[u8]) -> Self {
17+
/// Fetches the flags from the byte-string
18+
pub fn get_flags(bytes: &[u8]) -> Result<Self, SerializationError> {
1819
let compression_flag_set = (bytes[0] >> 7) & 1;
1920
let infinity_flag_set = (bytes[0] >> 6) & 1;
2021
let sort_flag_set = (bytes[0] >> 5) & 1;
2122

22-
Self {
23-
is_compressed: compression_flag_set == 1,
24-
is_infinity: infinity_flag_set == 1,
25-
is_lexographically_largest: sort_flag_set == 1,
23+
let is_compressed = compression_flag_set == 1;
24+
let is_infinity = infinity_flag_set == 1;
25+
let is_lexographically_largest = sort_flag_set == 1;
26+
27+
if is_lexographically_largest && (!is_compressed || is_infinity) {
28+
return Err(SerializationError::InvalidData);
2629
}
30+
31+
Ok(Self {
32+
is_compressed,
33+
is_infinity,
34+
is_lexographically_largest,
35+
})
2736
}
37+
38+
/// Encodes the flags into the byte-string
2839
pub fn encode_flags(&self, bytes: &mut [u8]) {
2940
if self.is_compressed {
3041
bytes[0] |= 1 << 7;
@@ -38,6 +49,13 @@ impl EncodingFlags {
3849
bytes[0] |= 1 << 5;
3950
}
4051
}
52+
53+
/// Removes the flags from the byte-string.
54+
///
55+
/// This reverses the effects of `encode_flags`.
56+
pub fn remove_flags(bytes: &mut [u8]) {
57+
bytes[0] &= 0b0001_1111;
58+
}
4159
}
4260

4361
pub(crate) fn deserialize_fq(bytes: [u8; 48]) -> Option<Fq> {
@@ -71,20 +89,15 @@ pub(crate) fn serialize_fq(field: Fq) -> [u8; 48] {
7189
result
7290
}
7391

74-
pub(crate) fn read_fq_with_offset(
75-
bytes: &[u8],
76-
offset: usize,
77-
mask: bool,
78-
) -> Result<Fq, ark_serialize::SerializationError> {
92+
fn read_bytes_with_offset(bytes: &[u8], offset: usize, mask: bool) -> [u8; G1_SERIALIZED_SIZE] {
7993
let mut tmp = [0; G1_SERIALIZED_SIZE];
8094
// read `G1_SERIALIZED_SIZE` bytes
8195
tmp.copy_from_slice(&bytes[offset * G1_SERIALIZED_SIZE..G1_SERIALIZED_SIZE * (offset + 1)]);
8296

8397
if mask {
84-
// Mask away the flag bits
85-
tmp[0] &= 0b0001_1111;
98+
EncodingFlags::remove_flags(&mut tmp);
8699
}
87-
deserialize_fq(tmp).ok_or(SerializationError::InvalidData)
100+
tmp
88101
}
89102

90103
pub(crate) fn read_g1_compressed<R: ark_serialize::Read>(
@@ -97,20 +110,26 @@ pub(crate) fn read_g1_compressed<R: ark_serialize::Read>(
97110
.ok_or(SerializationError::InvalidData)?;
98111

99112
// Obtain the three flags from the start of the byte sequence
100-
let flags = EncodingFlags::get_flags(&bytes[..]);
113+
let flags = EncodingFlags::get_flags(&bytes[..])?;
101114

102-
// we expect to be deserializing a compressed point
115+
// We expect to be deserializing a compressed point
103116
if !flags.is_compressed {
104117
return Err(SerializationError::UnexpectedFlags);
105118
}
106119

120+
// Attempt to obtain the x-coordinate
121+
let x_bytes = read_bytes_with_offset(&bytes, 0, true);
122+
107123
if flags.is_infinity {
124+
// Check that the `x` co-ordinate was `0`
125+
if x_bytes != [0u8; 48] {
126+
return Err(SerializationError::InvalidData);
127+
}
128+
108129
return Ok(G1Affine::zero());
109130
}
110131

111-
// Attempt to obtain the x-coordinate
112-
let x = read_fq_with_offset(&bytes, 0, true)?;
113-
132+
let x = deserialize_fq(x_bytes).ok_or(SerializationError::InvalidData)?;
114133
let p = G1Affine::get_point_from_x_unchecked(x, flags.is_lexographically_largest)
115134
.ok_or(SerializationError::InvalidData)?;
116135

@@ -126,22 +145,27 @@ pub(crate) fn read_g1_uncompressed<R: ark_serialize::Read>(
126145
.map_err(|_| SerializationError::InvalidData)?;
127146

128147
// Obtain the three flags from the start of the byte sequence
129-
let flags = EncodingFlags::get_flags(&bytes[..]);
148+
let flags = EncodingFlags::get_flags(&bytes[..])?;
130149

131150
// we expect to be deserializing an uncompressed point
132151
if flags.is_compressed {
133152
return Err(SerializationError::UnexpectedFlags);
134153
}
135154

155+
let x_bytes = read_bytes_with_offset(&bytes, 0, true);
156+
let y_bytes = read_bytes_with_offset(&bytes, 1, false);
157+
136158
if flags.is_infinity {
159+
if x_bytes != [0u8; 48] || y_bytes != [0u8; 48] {
160+
return Err(SerializationError::InvalidData);
161+
}
137162
return Ok(G1Affine::zero());
138163
}
139164

140165
// Attempt to obtain the x-coordinate
141-
let x = read_fq_with_offset(&bytes, 0, true)?;
166+
let x = deserialize_fq(x_bytes).ok_or(SerializationError::InvalidData)?;
142167
// Attempt to obtain the y-coordinate
143-
let y = read_fq_with_offset(&bytes, 1, false)?;
144-
168+
let y = deserialize_fq(y_bytes).ok_or(SerializationError::InvalidData)?;
145169
let p = G1Affine::new_unchecked(x, y);
146170

147171
Ok(p)
@@ -156,21 +180,26 @@ pub(crate) fn read_g2_compressed<R: ark_serialize::Read>(
156180
.map_err(|_| SerializationError::InvalidData)?;
157181

158182
// Obtain the three flags from the start of the byte sequence
159-
let flags = EncodingFlags::get_flags(&bytes);
183+
let flags = EncodingFlags::get_flags(&bytes)?;
160184

161185
// we expect to be deserializing a compressed point
162186
if !flags.is_compressed {
163187
return Err(SerializationError::UnexpectedFlags);
164188
}
165189

190+
let xc1_bytes = read_bytes_with_offset(&bytes, 0, true);
191+
let xc0_bytes = read_bytes_with_offset(&bytes, 1, false);
192+
166193
if flags.is_infinity {
194+
if xc1_bytes != [0u8; 48] || xc0_bytes != [0u8; 48] {
195+
return Err(SerializationError::InvalidData);
196+
}
167197
return Ok(G2Affine::zero());
168198
}
169199

170200
// Attempt to obtain the x-coordinate
171-
let xc1 = read_fq_with_offset(&bytes, 0, true)?;
172-
let xc0 = read_fq_with_offset(&bytes, 1, false)?;
173-
201+
let xc1 = deserialize_fq(xc1_bytes).ok_or(SerializationError::InvalidData)?;
202+
let xc0 = deserialize_fq(xc0_bytes).ok_or(SerializationError::InvalidData)?;
174203
let x = Fq2::new(xc0, xc1);
175204

176205
let p = G2Affine::get_point_from_x_unchecked(x, flags.is_lexographically_largest)
@@ -188,25 +217,39 @@ pub(crate) fn read_g2_uncompressed<R: ark_serialize::Read>(
188217
.map_err(|_| SerializationError::InvalidData)?;
189218

190219
// Obtain the three flags from the start of the byte sequence
191-
let flags = EncodingFlags::get_flags(&bytes);
220+
let flags = EncodingFlags::get_flags(&bytes)?;
192221

193222
// we expect to be deserializing an uncompressed point
194223
if flags.is_compressed {
195224
return Err(SerializationError::UnexpectedFlags);
196225
}
197226

227+
let xc1_bytes = read_bytes_with_offset(&bytes, 0, true);
228+
let xc0_bytes = read_bytes_with_offset(&bytes, 1, false);
229+
230+
let yc1_bytes = read_bytes_with_offset(&bytes, 2, false);
231+
let yc0_bytes = read_bytes_with_offset(&bytes, 3, false);
232+
198233
if flags.is_infinity {
234+
if xc1_bytes != [0u8; 48]
235+
|| xc0_bytes != [0u8; 48]
236+
|| yc1_bytes != [0u8; 48]
237+
|| yc0_bytes != [0u8; 48]
238+
{
239+
return Err(SerializationError::InvalidData);
240+
}
199241
return Ok(G2Affine::zero());
200242
}
201243

244+
let xc1 = deserialize_fq(xc1_bytes).ok_or(SerializationError::InvalidData)?;
245+
let xc0 = deserialize_fq(xc0_bytes).ok_or(SerializationError::InvalidData)?;
246+
let yc1 = deserialize_fq(yc1_bytes).ok_or(SerializationError::InvalidData)?;
247+
let yc0 = deserialize_fq(yc0_bytes).ok_or(SerializationError::InvalidData)?;
248+
202249
// Attempt to obtain the x-coordinate
203-
let xc1 = read_fq_with_offset(&bytes, 0, true)?;
204-
let xc0 = read_fq_with_offset(&bytes, 1, false)?;
205250
let x = Fq2::new(xc0, xc1);
206251

207252
// Attempt to obtain the y-coordinate
208-
let yc1 = read_fq_with_offset(&bytes, 2, false)?;
209-
let yc0 = read_fq_with_offset(&bytes, 3, false)?;
210253
let y = Fq2::new(yc0, yc1);
211254

212255
let p = G2Affine::new_unchecked(x, y);

0 commit comments

Comments
 (0)