// Copyright 2015-2019 Brian Smith.
//
// Permission to use, copy, modify, and/or distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHORS DISCLAIM ALL WARRANTIES
// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY
// SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
// OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
// CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

use crate::{digest, error, limb};

#[repr(transparent)]
pub struct Scalar([u8; SCALAR_LEN]);

pub const SCALAR_LEN: usize = 32;

impl Scalar {
    // Constructs a `Scalar` from `bytes`, failing if `bytes` encodes a scalar
    // that is not in the range [0, n).
    pub fn from_bytes_checked(bytes: [u8; SCALAR_LEN]) -> Result<Self, error::Unspecified> {
        // n, the order of the prime-order group:
        // n = 2^252 + 27742317777372353535851937790883648493,
        // expressed as little-endian limbs.
        const ORDER: [limb::Limb; SCALAR_LEN / limb::LIMB_BYTES] =
            limbs![0x5cf5d3ed, 0x5812631a, 0xa2f79cd6, 0x14def9de, 0, 0, 0, 0x10000000];

        // `bytes` is in little-endian order, but the limb parser expects
        // big-endian input, so range-check a reversed copy.
        let mut reversed = bytes;
        reversed.reverse();

        let mut limbs = [0; SCALAR_LEN / limb::LIMB_BYTES];
        limb::parse_big_endian_in_range_and_pad_consttime(
            untrusted::Input::from(&reversed),
            limb::AllowZero::Yes,
            &ORDER,
            &mut limbs,
        )?;

        Ok(Self(bytes))
    }

    // Constructs a `Scalar` from `digest` reduced modulo n.
    pub fn from_sha512_digest_reduced(digest: digest::Digest) -> Self {
        prefixed_extern! {
            fn x25519_sc_reduce(s: &mut UnreducedScalar);
        }
        let mut unreduced = [0u8; digest::SHA512_OUTPUT_LEN];
        unreduced.copy_from_slice(digest.as_ref());
        unsafe { x25519_sc_reduce(&mut unreduced) };
        // The reduced value fits entirely in the first `SCALAR_LEN` bytes.
        Self((&unreduced[..SCALAR_LEN]).try_into().unwrap())
    }
}

#[repr(transparent)]
pub struct MaskedScalar([u8; SCALAR_LEN]);

impl MaskedScalar {
    pub fn from_bytes_masked(bytes: [u8; SCALAR_LEN]) -> Self {
        prefixed_extern! {
            fn x25519_sc_mask(a: &mut [u8; SCALAR_LEN]);
        }
        // Clamp (mask) the scalar bits in place.
        let mut r = Self(bytes);
        unsafe { x25519_sc_mask(&mut r.0) };
        r
    }
}

impl From<MaskedScalar> for Scalar {
    fn from(MaskedScalar(scalar): MaskedScalar) -> Self {
        Self(scalar)
    }
}

type UnreducedScalar = [u8; UNREDUCED_SCALAR_LEN];
const UNREDUCED_SCALAR_LEN: usize = SCALAR_LEN * 2;
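
// The tests below are an illustrative sketch, not part of the original module.
// They assume they live in this file (so they can see the crate-private items)
// and that the crate links its usual x25519_sc_* routines. The `N` constant is
// just the little-endian byte encoding of the `ORDER` limbs used above; the
// clamping expectations are an assumption that `x25519_sc_mask` performs the
// conventional RFC 7748/8032 masking (clear the low 3 bits, clear bit 255, set
// bit 254), which is not stated in this file.
#[cfg(test)]
mod tests {
    use super::*;

    // Little-endian encoding of n, matching `ORDER` in `from_bytes_checked`.
    const N: [u8; SCALAR_LEN] = [
        0xed, 0xd3, 0xf5, 0x5c, 0x1a, 0x63, 0x12, 0x58, 0xd6, 0x9c, 0xf7, 0xa2, 0xde, 0xf9,
        0xde, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x10,
    ];

    #[test]
    fn from_bytes_checked_range() {
        // Zero is accepted because the range check uses `AllowZero::Yes`.
        assert!(Scalar::from_bytes_checked([0u8; SCALAR_LEN]).is_ok());

        // n itself is outside [0, n) and must be rejected.
        assert!(Scalar::from_bytes_checked(N).is_err());

        // n - 1 is the largest accepted value (low byte of n is 0xed, so
        // subtracting 1 does not borrow).
        let mut n_minus_1 = N;
        n_minus_1[0] -= 1;
        assert!(Scalar::from_bytes_checked(n_minus_1).is_ok());
    }

    #[test]
    fn from_bytes_masked_clamps() {
        // Assumed clamping behavior of `x25519_sc_mask` (see note above).
        let MaskedScalar(masked) = MaskedScalar::from_bytes_masked([0xff; SCALAR_LEN]);
        assert_eq!(masked[0] & 0b0000_0111, 0); // low 3 bits cleared
        assert_eq!(masked[31] & 0b1000_0000, 0); // bit 255 cleared
        assert_eq!(masked[31] & 0b0100_0000, 0b0100_0000); // bit 254 set
    }
}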