Security/prevent keypair to be copied or cloned #88

Merged 2 commits on Oct 30, 2024
11 changes: 8 additions & 3 deletions libecvrf/examples/vrf.rs
@@ -1,5 +1,5 @@
use libecvrf::{
extends::ScalarExtend,
extends::{AffineExtend, ScalarExtend},

Check warning on line 2 in libecvrf/examples/vrf.rs (GitHub Actions / Test): unused import: `AffineExtend`

Check failure on line 2 in libecvrf/examples/vrf.rs (GitHub Actions / Clippy): unused import: `AffineExtend`

error: unused import: `AffineExtend`
 --> libecvrf/examples/vrf.rs:2:15
  |
2 |     extends::{AffineExtend, ScalarExtend},
  |               ^^^^^^^^^^^^
  |
  = note: `-D unused-imports` implied by `-D warnings`
  = help: to override `-D warnings` add `#[allow(unused_imports)]`
helper::{calculate_witness_address, get_address},
secp256k1::{curve::Scalar, SecretKey},
util::thread_rng,
@@ -9,7 +9,12 @@

fn main() {
let key_pair = KeyPair::new();
let address = get_address(key_pair.public_key);
let address = get_address(&key_pair.public_key);
println!(
"PublicKey: {:#?}",
key_pair.public_key.serialize_compressed()
);

println!("Address: {}", hex::encode(address));

let affine = Affine::new(Field::from_int(4), Field::from_int(95));
@@ -23,7 +28,7 @@
let proof = ecvrf
.prove(&alpha)
.expect("Failed to prove ECVRF randomness");
println!("result: {:#?}", proof);
println!("result: {:#?} {:#?}", &alpha, proof);

println!("{:?}", ecvrf.verify(&alpha, &proof));

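The CI annotation above is self-explanatory: the example imports `AffineExtend` but never calls anything from it. Assuming the example is meant to stay as shown in this diff, the failure can be cleared either by trimming the import list or by keeping the import and adding the `#[allow(unused_imports)]` attribute that the help text suggests. A sketch of the revised import block (not part of this diff):

```rust
// libecvrf/examples/vrf.rs -- import block only, with the unused trait dropped.
// Alternatively, keep `AffineExtend` and annotate the `use` item with
// `#[allow(unused_imports)]`, as the Clippy help message suggests.
use libecvrf::{
    extends::ScalarExtend,
    helper::{calculate_witness_address, get_address},
    secp256k1::{curve::Scalar, SecretKey},
    util::thread_rng,
};
```
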
14 changes: 7 additions & 7 deletions libecvrf/src/ecvrf.rs
@@ -24,7 +24,7 @@ pub trait Zeroable {
fn is_zero(&self) -> bool;
}

#[derive(Debug, Clone, Copy, Eq, PartialEq)]
#[derive(Debug, Eq, PartialEq)]
/// Key pair
pub struct KeyPair {
/// Public key
@@ -33,7 +33,7 @@ pub struct KeyPair {
pub secret_key: SecretKey,
}

#[derive(Debug, Clone, Copy, Eq, PartialEq)]
#[derive(Debug, Eq, PartialEq)]
/// Raw key pair
pub struct RawKeyPair {
/// Raw public key
@@ -116,8 +116,8 @@ impl From<String> for KeyPair {
}
}

impl From<KeyPair> for RawKeyPair {
fn from(value: KeyPair) -> Self {
impl From<&KeyPair> for RawKeyPair {
fn from(value: &KeyPair) -> Self {
RawKeyPair {
public_key: value.public_key.serialize(),
secret_key: value.secret_key.serialize(),
@@ -184,7 +184,7 @@ pub struct ECVRF<'a> {
ctx_gen: &'a ECMultGenContext,
}

impl ECVRF<'_> {
impl<'a> ECVRF<'a> {
/// Create new instance of ECVRF from a secret key
pub fn new(secret_key: SecretKey) -> Self {
ECVRF {
@@ -371,8 +371,8 @@ impl ECVRF<'_> {
&h,
&pub_affine,
&vrf_proof.gamma,
&jacobian_to_affine(&u),
&jacobian_to_affine(&v),
&Affine::from_jacobian(&u),
&Affine::from_jacobian(&v),
);

// y = keccak256(gama.encode())
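The substance of the pull request is the pair of derive changes above: with `Clone` and `Copy` removed, a `KeyPair` (and the secret key inside it) can no longer be duplicated implicitly, and the `RawKeyPair` conversion now borrows the pair instead of consuming it. A minimal sketch of the effect, assuming `KeyPair`, `RawKeyPair`, and `helper::get_address` are reachable from the crate root as the example and node code suggest:

```rust
use libecvrf::{helper::get_address, KeyPair, RawKeyPair};

fn main() {
    let key_pair = KeyPair::new();

    // With `Clone`/`Copy` gone, either of these is now a compile error:
    // let dup = key_pair.clone();              // no method named `clone`
    // let moved = key_pair; let _ = key_pair;  // use of moved value

    // The `From<&KeyPair>` impl borrows, so converting to the raw
    // (serialized) form neither moves nor implicitly copies the pair.
    let raw = RawKeyPair::from(&key_pair);
    let _ = raw;

    // The original pair stays usable afterwards.
    let address = get_address(&key_pair.public_key);
    println!("Address: {:?}", address);
}
```
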
16 changes: 4 additions & 12 deletions libecvrf/src/helper.rs
@@ -67,26 +67,18 @@ pub fn projective_ec_add(a: &Affine, b: &Affine) -> Jacobian {
r
}

/// Quick transform a Jacobian to Affine and also normalize it
pub fn jacobian_to_affine(j: &Jacobian) -> Affine {
let mut ra = Affine::from_gej(j);
ra.x.normalize();
ra.y.normalize();
ra
}

/// Perform multiplication between a point and a scalar: a * P
pub fn ecmult(context: &ECMultContext, a: &Affine, na: &Scalar) -> Affine {
let mut rj = Jacobian::default();
context.ecmult(&mut rj, &Jacobian::from_ge(a), na, &Scalar::from_int(0));
jacobian_to_affine(&rj)
Affine::from_jacobian(&rj)
}

/// Perform multiplication between a value and G: a * G
pub fn ecmult_gen(context: &ECMultGenContext, ng: &Scalar) -> Affine {
let mut rj = Jacobian::default();
context.ecmult_gen(&mut rj, ng);
jacobian_to_affine(&rj)
Affine::from_jacobian(&rj)
}

/// Calculate witness address from a Affine
@@ -97,8 +89,8 @@ pub fn calculate_witness_address(witness: &Affine) -> [u8; 20] {
}

/// Has a Public Key and return a Ethereum address
pub fn get_address(pub_key: PublicKey) -> [u8; 20] {
let mut affine_pub: Affine = pub_key.into();
pub fn get_address(pub_key: &PublicKey) -> [u8; 20] {
let mut affine_pub: Affine = (*pub_key).into();
affine_pub.x.normalize();
affine_pub.y.normalize();
calculate_witness_address(&affine_pub)
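The deleted `jacobian_to_affine` helper is replaced by `Affine::from_jacobian`, presumably provided by the `AffineExtend` trait in `libecvrf::extends` (the trait the example now imports). A sketch of what such an extension could look like, assuming it keeps the removed helper's convert-then-normalize behaviour; the trait and impl below are hypothetical stand-ins, not the crate's actual definitions, and the import path assumes the `secp256k1` re-export shown in the example:

```rust
use libecvrf::secp256k1::curve::{Affine, Field, Jacobian};

/// Hypothetical extension trait; the real `AffineExtend` lives in
/// `libecvrf::extends` and is not shown in this diff.
pub trait AffineExtend {
    fn from_jacobian(j: &Jacobian) -> Affine;
}

impl AffineExtend for Affine {
    fn from_jacobian(j: &Jacobian) -> Affine {
        // Same behaviour as the removed `jacobian_to_affine` helper:
        // convert out of Jacobian coordinates, then normalize x and y.
        let mut r = Affine::from_gej(j);
        r.x.normalize();
        r.y.normalize();
        r
    }
}

fn main() {
    // Round-trip the demo point from the example through Jacobian coordinates.
    let p = Affine::new(Field::from_int(4), Field::from_int(95));
    let j = Jacobian::from_ge(&p);
    println!("{:?}", Affine::from_jacobian(&j));
}
```
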
12 changes: 6 additions & 6 deletions node/src/main.rs
@@ -57,7 +57,7 @@ async fn orand_get_epoch(
network: i64,
address: String,
epoch: i64,
context: Arc<NodeContext>,
context: Arc<NodeContext<'_>>,
) -> Result<Response<BoxBody<Bytes, hyper::Error>>, hyper::Error> {
let postgres = context.postgres();
let randomness = postgres.table_randomness();
@@ -79,7 +79,7 @@
}

async fn orand_new_epoch(
context: Arc<NodeContext>,
context: Arc<NodeContext<'_>>,
username: String,
network: i64,
address: String,
@@ -100,7 +100,7 @@
/// path, and returns a Future of a Response.
async fn orand(
req: Request<hyper::body::Incoming>,
context: Arc<NodeContext>,
context: Arc<NodeContext<'_>>,
) -> Result<Response<BoxBody<Bytes, hyper::Error>>, hyper::Error> {
let (header, body) = req.into_parts();
match (&header.method, header.uri.path()) {
@@ -248,7 +248,7 @@ async fn orand(
// Generate hmac key if it didn't exist
let mut hmac_secret = [0u8; ORAND_HMAC_KEY_SIZE];
random_bytes(&mut hmac_secret);
let mut raw_keypair = RawKeyPair::from(KeyPair::new());
let mut raw_keypair = RawKeyPair::from(&KeyPair::new());
let insert_result = keyring
.insert(json!({
"username": username,
@@ -418,7 +418,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
// Generate new secret
Err(_) => KeyPair::new(),
};
let mut raw_keypair = RawKeyPair::from(new_keypair);
let mut raw_keypair = RawKeyPair::from(&new_keypair);
let insert_result = keyring
.insert(json!({
"username": ORAND_KEYRING_NAME,
Expand All @@ -443,7 +443,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
);
log::info!(
"Address of public key: 0x{}",
hex::encode(get_address(keypair.public_key))
hex::encode(get_address(&keypair.public_key))
);

// Create new node context
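On the node side the handlers simply switch from `Arc<NodeContext>` to `Arc<NodeContext<'_>>`, letting the compiler pick an anonymous lifetime for the borrowed ECVRF contexts held inside. A toy illustration of that signature change, with a hypothetical stand-in context type and tokio (which the node crate already uses) to drive it:

```rust
use std::sync::Arc;

// Hypothetical stand-in for the real NodeContext<'a>, which borrows the
// secp256k1 multiplication contexts through ECVRF<'a>.
struct NodeContext<'a> {
    label: &'a str,
}

// `'_` in the argument type introduces an anonymous lifetime parameter, so the
// handler accepts a context that borrows data of any lifetime, not only 'static.
async fn orand_get_epoch(epoch: i64, context: Arc<NodeContext<'_>>) -> String {
    format!("{}: epoch {}", context.label, epoch)
}

#[tokio::main]
async fn main() {
    let label = String::from("orand");
    let context = Arc::new(NodeContext { label: &label });
    println!("{}", orand_get_epoch(1, context).await);
}
```
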
8 changes: 4 additions & 4 deletions node/src/node_context.rs
@@ -5,8 +5,8 @@ use tokio::sync::Mutex;
use crate::postgres_sql::Postgres;

/// Node context
pub struct NodeContext {
ecvrf: ECVRF<'static>,
pub struct NodeContext<'a> {
ecvrf: ECVRF<'a>,
is_testnet: bool,
postgres: Postgres,
key_id: i64,
@@ -16,7 +16,7 @@ pub struct NodeContext<'a> {
pub sync: Mutex<bool>,
}

impl NodeContext {
impl<'a> NodeContext<'a> {
/// Create a new instance of node context
pub fn new(key_id: i64, keypair: KeyPair, is_testnet: bool, postgres: Postgres) -> Arc<Self> {
let ecvrf = ECVRF::new(keypair.secret_key);
@@ -41,7 +41,7 @@
}

/// Get ECVRF instance
pub fn ecvrf(&self) -> &ECVRF<'static> {
pub fn ecvrf(&self) -> &ECVRF<'_> {
&self.ecvrf
}

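`NodeContext` now threads a lifetime through to `ECVRF<'a>`, which borrows its multiplication contexts rather than requiring `'static` references, and the `ecvrf()` getter ties its returned borrow to `&self` via `'_`. A compact sketch of that shape, using hypothetical stand-in types for the secp256k1 contexts:

```rust
use std::sync::Arc;

// Hypothetical stand-in for ECMultContext / ECMultGenContext.
struct MultContext;

// Mirrors ECVRF<'a>, which holds `&'a ECMultContext` and `&'a ECMultGenContext`.
struct Ecvrf<'a> {
    ctx: &'a MultContext,
}

struct NodeContext<'a> {
    ecvrf: Ecvrf<'a>,
}

impl<'a> NodeContext<'a> {
    fn new(ctx: &'a MultContext) -> Arc<Self> {
        Arc::new(NodeContext { ecvrf: Ecvrf { ctx } })
    }

    // `&ECVRF<'_>` ties the returned borrow to `&self`, so the getter no
    // longer insists on a 'static context as the previous signature did.
    fn ecvrf(&self) -> &Ecvrf<'_> {
        &self.ecvrf
    }
}

fn main() {
    // In the real crate the contexts are long-lived precomputed tables.
    static CTX: MultContext = MultContext;
    let node = NodeContext::new(&CTX);
    let _borrowed_ctx = node.ecvrf().ctx;
}
```
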
2 changes: 1 addition & 1 deletion node/src/postgres/table/randomness.rs
@@ -137,7 +137,7 @@ impl<'a> RandomnessTable<'a> {
/// Find randomness record by its network and address
pub async fn safe_insert(
&self,
context: Arc<NodeContext>,
context: Arc<NodeContext<'_>>,
username: String,
network: i64,
address: String,