use std::collections::HashMap;
use std::sync::Arc;

use super::traits::ecdsa::SparkSignerEcdsa;
use super::traits::ecies::SparkSignerEcies;
use super::traits::frost::SparkSignerFrost;
use super::traits::frost_signing::SparkSignerFrostSigning;
use super::traits::secp256k1::KeygenMethod;
use super::traits::secp256k1::SparkSignerSecp256k1;
use super::traits::shamir::SparkSignerShamir;
use super::traits::SparkSigner;

use crate::constants::spark::frost::FROST_USER_IDENTIFIER;
use crate::constants::spark::frost::FROST_USER_KEY_PACKAGE_MIN_SIGNERS;
use crate::constants::spark::frost::FROST_USER_SIGNING_ROLE;
use crate::constants::spark::IDENTITY_KEYPAIR_INDEX;
use crate::error::SparkSdkError;
use crate::wallet::internal_handlers::traits::create_tree::DepositAddressTree;
use crate::wallet::internal_handlers::traits::transfer::LeafKeyTweak;
use crate::wallet::internal_handlers::traits::transfer::LeafRefundSigningData;
use crate::wallet::internal_handlers::utils::bitcoin_tx_from_bytes;
use crate::wallet::internal_handlers::utils::next_sequence;
use crate::wallet::internal_handlers::utils::parsers::parse_secret_key;
use crate::wallet::internal_handlers::utils::serialize_bitcoin_transaction;
use crate::wallet::utils::bitcoin::sighash_from_tx;
use crate::wallet::utils::transaction::ephemeral_anchor_output;
use crate::SparkNetwork;

use crate::common_types::types::frost::FrostNonce;
use crate::common_types::types::frost::FrostNonceCommitment;
use crate::common_types::types::frost::FrostSigningCommitments;
use crate::common_types::types::frost::FrostSigningNonces;
use crate::common_types::types::hex_decode;
use crate::common_types::types::hex_encode;
use crate::common_types::types::ChildNumber;
use crate::common_types::types::HashbrownMap;
use crate::common_types::types::RwLock;
use crate::common_types::types::Secp256k1;
use crate::common_types::types::Secp256k1Message;
use crate::common_types::types::SparkRange;
use crate::common_types::types::Transaction;
use crate::common_types::types::Uuid;
use crate::common_types::types::XPrv;
use crate::common_types::types::U256;
use crate::common_types::types::{PublicKey, SecretKey};

use spark_cryptography::key_arithmetic::subtract_secret_keys;
use spark_cryptography::secp256k1::CURVE_ORDER;
use spark_cryptography::secret_sharing::secret_sharing::split_secret_with_proofs;
use spark_cryptography::secret_sharing::secret_sharing::VerifiableSecretShare;
use spark_cryptography::signing::aggregate_frost;
use spark_cryptography::signing::sign_frost;
use spark_protos::common::SigningCommitment as SparkOperatorCommitment;
use spark_protos::frost::AggregateFrostRequest;
use spark_protos::frost::AggregateFrostResponse;
use spark_protos::frost::FrostSigningJob;
use spark_protos::frost::SignFrostRequest;
use spark_protos::frost::SignFrostResponse;
use spark_protos::spark::LeafRefundTxSigningResult;
use spark_protos::spark::NodeSignatures;
use spark_protos::spark::RequestedSigningCommitments;
use spark_protos::spark::SigningKeyshare;
use spark_protos::spark::SigningResult;

use bip32::DerivationPath;
use k256::elliptic_curve::bigint::Encoding;
use tonic::async_trait;

#[derive(Clone)]
pub struct DefaultSigner {
    /// Master seed used to derive the identity key and leaf signing keys.
    #[cfg(feature = "self-signing")]
    master_seed: Vec<u8>,

    #[cfg(not(feature = "self-signing"))]
    pub wallet_connection_url: String,

    #[cfg(not(feature = "self-signing"))]
    pub wallet_connection_api_key: String,

    /// Tracks the key derivation index used by the signer.
    pub derivation_index: Arc<RwLock<u32>>,

    /// Maps hex-encoded FROST nonce commitments to their hex-encoded signing nonces.
    pub nonce_commitments: Arc<RwLock<HashbrownMap<String, String>>>,

    /// Maps hex-encoded public keys to the hex-encoded secret keys held by the signer.
    pub public_keys_to_secret_keys: Arc<RwLock<HashbrownMap<String, String>>>,

    /// The Spark network this signer operates on.
    pub network: SparkNetwork,
}

impl SparkSignerSecp256k1 for DefaultSigner {
    fn get_identity_public_key(&self) -> Result<Vec<u8>, SparkSdkError> {
        let master_seed_bytes = self.load_master_seed()?;
        let master_seed = derive_master_key_from_path(&master_seed_bytes, &self.network)?;
        let identity_key = derive_child_key(&master_seed, IDENTITY_KEYPAIR_INDEX)?;

        Ok(identity_key.public_key().to_bytes().to_vec())
    }

    #[allow(private_interfaces)]
    fn new_secp256k1_keypair(&self, keygen_method: KeygenMethod) -> Result<Vec<u8>, SparkSdkError> {
        let secret_key_bytes = match keygen_method {
            KeygenMethod::Uuid(uuid) => {
                let parsed_uuid = Uuid::parse_str(&uuid)?;
                self.derive_signing_key_from_leaf(parsed_uuid)?
            }
            KeygenMethod::Random => {
                let key = SecretKey::new(&mut SparkRange);
                key.secret_bytes().to_vec()
            }
        };

        let public_key = secret_key_to_public_key(&secret_key_bytes)?;
        self.insert_to_keypair_map(public_key.clone(), secret_key_bytes)?;

        Ok(public_key)
    }

    fn insert_secp256k1_keypair_from_secret_key<T: AsRef<[u8]>>(
        &self,
        secret_key_bytes: T,
    ) -> Result<Vec<u8>, SparkSdkError> {
        let secp = Secp256k1::new();
        let secret_key = SecretKey::from_slice(secret_key_bytes.as_ref())?;
        let public_key = PublicKey::from_secret_key(&secp, &secret_key);

        self.insert_to_keypair_map(public_key.serialize(), secret_key.secret_bytes())?;

        Ok(public_key.serialize().to_vec())
    }

    fn subtract_secret_keys_given_pubkeys<T: AsRef<[u8]>, U: AsRef<[u8]>>(
        &self,
        target_pubkey: T,
        source_pubkey: U,
        save_new_key: bool,
    ) -> Result<Vec<u8>, SparkSdkError> {
        if target_pubkey.as_ref() == source_pubkey.as_ref() {
            return Err(SparkSdkError::InvalidInput(
                "Target and source public keys are the same".into(),
            ));
        }

        let target_secret_key = self.get_secret_key_from_pubkey(&target_pubkey)?;
        let source_secret_key = self.get_secret_key_from_pubkey(&source_pubkey)?;

        let result_secret_key = subtract_secret_keys(&target_secret_key, &source_secret_key)?;

        let result_public_key = secret_key_to_public_key(&result_secret_key)?;

        if save_new_key {
            self.insert_to_keypair_map(result_public_key.clone(), result_secret_key)?;
        }

        Ok(result_public_key)
    }

    fn sensitive_expose_secret_key_from_pubkey<T: AsRef<[u8]>>(
        &self,
        public_key: T,
        delete_after_exposing: bool,
    ) -> Result<Vec<u8>, SparkSdkError> {
        let secret_key = self.get_secret_key_from_pubkey(&public_key)?;

        if delete_after_exposing {
            self.evict_from_keypair_map(public_key)?;
        }

        Ok(secret_key)
    }
}

#[cfg(test)]
mod default_signer_secp256k1_tests {
    use super::*;
    use crate::error::SparkSdkError;

    use bip32::Language;

    type WrappedSigner = Arc<DefaultSigner>;

    const TEST_NETWORK: SparkNetwork = SparkNetwork::Regtest;

    async fn create_default_signer() -> Result<WrappedSigner, SparkSdkError> {
        let rng = SparkRange;
        let mnemonic = bip32::Mnemonic::random(rng, Language::English);
        let master_seed = mnemonic.to_seed("").as_bytes().to_vec();
        let signer = DefaultSigner::from_master_seed(&master_seed, TEST_NETWORK).await?;

        Ok(signer)
    }

    #[tokio::test]
    #[cfg(feature = "self-signing")]
    async fn test_get_identity_public_key() -> Result<(), SparkSdkError> {
        let signer = create_default_signer().await?;
        signer
            .get_identity_public_key()
            .expect("failed to get identity pk");

        Ok(())
    }

    #[tokio::test]
    async fn test_new_secp256k1_keypair() -> Result<(), SparkSdkError> {
        let signer = create_default_signer().await?;
        let pubkey = signer.new_secp256k1_keypair(KeygenMethod::Random)?;
        assert_eq!(pubkey.len(), 33);

        let identity_pubkey = signer.get_identity_public_key()?;
        assert_ne!(identity_pubkey, pubkey);

        let current_derivation_index = signer.derivation_index.read().clone();
        assert_eq!(current_derivation_index, IDENTITY_KEYPAIR_INDEX + 1);

        Ok(())
    }

    #[tokio::test]
    async fn test_insert_secp256k1_keypair_from_secret_key() -> Result<(), SparkSdkError> {
        let signer = create_default_signer().await?;
        let secret_key = {
            let sk = SecretKey::new(&mut SparkRange);
            sk.secret_bytes().to_vec()
        };

        let pubkey = signer.insert_secp256k1_keypair_from_secret_key(&secret_key)?;
        assert_eq!(pubkey.len(), 33);

        let retrieved_secret_key =
            signer.sensitive_expose_secret_key_from_pubkey(&pubkey, false)?;
        assert_eq!(retrieved_secret_key, secret_key);

        Ok(())
    }

    #[tokio::test]
    async fn test_subtract_secret_keys_given_pubkeys() -> Result<(), SparkSdkError> {
        let signer = create_default_signer().await?;

        let pubkey_1 = signer.new_secp256k1_keypair(KeygenMethod::Random)?;
        let pubkey_2 = signer.new_secp256k1_keypair(KeygenMethod::Random)?;

        let new_pubkey = signer.subtract_secret_keys_given_pubkeys(&pubkey_1, &pubkey_2, true)?;

        assert_eq!(new_pubkey.len(), 33);

        let _ = signer.sensitive_expose_secret_key_from_pubkey(&new_pubkey, false)?;

        Ok(())
    }

    #[tokio::test]
    #[ignore]
    async fn test_sensitive_expose_secret_key_from_pubkey() -> Result<(), SparkSdkError> {
        let signer = create_default_signer().await?;

        let pubkey = signer.new_secp256k1_keypair(KeygenMethod::Random)?;
        let secret_key = signer.sensitive_expose_secret_key_from_pubkey(&pubkey, false)?;
        assert_eq!(secret_key.len(), 32);

        let _ = signer.sensitive_expose_secret_key_from_pubkey(&pubkey, true)?;

        let retrieve_again = signer.sensitive_expose_secret_key_from_pubkey(&pubkey, false);

        match retrieve_again {
            Err(SparkSdkError::SecretKeyNotFound(msg)) if msg.contains("Secret key not found") => {}
            _ => panic!("Expected error: 'Secret key not found' after deletion"),
        }

        Ok(())
    }
}

const SPLIT_SECRET_ERROR: &str = "Failed to split secret: ";
#[async_trait]
impl SparkSignerShamir for DefaultSigner {
    fn split_with_verifiable_secret_sharing(
        &self,
        message: Vec<u8>,
        threshold: usize,
        num_shares: usize,
    ) -> Result<Vec<VerifiableSecretShare>, SparkSdkError> {
        let raw = hex_decode(CURVE_ORDER).unwrap();
        let u = U256::from_be_slice(&raw);
        let minus_one = u.saturating_sub(&U256::ONE);
        let field_modulus = minus_one.to_be_bytes().to_vec();

        let shares = split_secret_with_proofs(&message, &field_modulus, threshold, num_shares)
            .map_err(|e| SparkSdkError::InvalidInput(format!("{} {}", SPLIT_SECRET_ERROR, e)))?;

        Ok(shares)
    }

    fn split_from_public_key_with_verifiable_secret_sharing(
        &self,
        public_key: Vec<u8>,
        threshold: usize,
        num_shares: usize,
    ) -> Result<Vec<VerifiableSecretShare>, SparkSdkError> {
        let secret_key = self.get_secret_key_from_pubkey(&public_key)?;
        let shares =
            self.split_with_verifiable_secret_sharing(secret_key, threshold, num_shares)?;
        Ok(shares)
    }
}

#[cfg(test)]
mod default_signer_shamir_tests {
    use super::*;
    use crate::error::SparkSdkError;
    use bip32::Language;
    use rand::rngs::OsRng;
    use std::sync::Arc;

    type WrappedSigner = Arc<DefaultSigner>;

    const TEST_NETWORK: SparkNetwork = SparkNetwork::Regtest;

    async fn create_shamir_test_signer() -> Result<WrappedSigner, SparkSdkError> {
        let rng = OsRng;
        let mnemonic = bip32::Mnemonic::random(rng, Language::English);
        let master_seed = mnemonic.to_seed("").as_bytes().to_vec();
        let signer = DefaultSigner::from_master_seed(&master_seed, TEST_NETWORK).await?;
        Ok(signer)
    }

    #[tokio::test]
    #[cfg(feature = "self-signing")]
    #[ignore]
    async fn test_split_with_verifiable_secret_sharing() -> Result<(), SparkSdkError> {
        let signer = create_shamir_test_signer().await?;
        let message = b"hello world".to_vec();

        let threshold = 2;
        let num_shares = 3;

        let shares = signer.split_with_verifiable_secret_sharing(message, threshold, num_shares)?;

        assert_eq!(shares.len(), num_shares);

        Ok(())
    }
}

impl SparkSignerEcdsa for DefaultSigner {
    fn sign_message_ecdsa_with_identity_key<T: AsRef<[u8]>>(
        &self,
        message: T,
        apply_hashing: bool,
    ) -> Result<Vec<u8>, SparkSdkError> {
        let payload_hash = if apply_hashing {
            sha256::digest(message.as_ref())
        } else {
            hex::encode(message.as_ref())
        };
        let message = Secp256k1Message::from_digest_slice(&hex::decode(&payload_hash).unwrap())?;

        let identity_key = self.get_identity_secret_key()?;
        let secret_key = SecretKey::from_slice(&identity_key).unwrap();

        let secp = Secp256k1::new();
        let signature = secp.sign_ecdsa(&message, &secret_key);

        Ok(signature.serialize_der().to_vec())
    }

    fn sign_message_ecdsa_with_key<T: AsRef<[u8]>>(
        &self,
        message: T,
        public_key_for_signing_key: &[u8],
        apply_hashing: bool,
    ) -> Result<Vec<u8>, SparkSdkError> {
        let payload_hash = if apply_hashing {
            sha256::digest(message.as_ref())
        } else {
            hex::encode(message.as_ref())
        };

        let secret_key_bytes = self.get_secret_key_from_pubkey(&public_key_for_signing_key)?;
        let secret_key = SecretKey::from_slice(&secret_key_bytes).unwrap();
        let secp = Secp256k1::new();

        let message = Secp256k1Message::from_digest_slice(&hex::decode(&payload_hash).unwrap())?;

        let signature = secp.sign_ecdsa(&message, &secret_key);
        Ok(signature.serialize_der().to_vec())
    }
}

#[cfg(test)]
mod default_signer_ecdsa_tests {
    use super::*;
    use crate::common_types::types::Digest;
    use crate::common_types::types::EcdsaSignature;
    use crate::common_types::types::Secp256k1Message;
    use crate::common_types::types::Sha256;
    use bip32::Language;

    type WrappedSigner = Arc<DefaultSigner>;

    const TEST_NETWORK: SparkNetwork = SparkNetwork::Regtest;

    async fn create_ecdsa_test_signer() -> Result<WrappedSigner, SparkSdkError> {
        let rng = SparkRange;
        let mnemonic = bip32::Mnemonic::random(rng, Language::English);
        let master_seed = mnemonic.to_seed("").as_bytes().to_vec();
        let signer = DefaultSigner::from_master_seed(&master_seed, TEST_NETWORK).await?;
        Ok(signer)
    }

    #[tokio::test]
    #[cfg(feature = "self-signing")]
    async fn test_sign_message_ecdsa_with_identity_key() -> Result<(), SparkSdkError> {
        let signer = create_ecdsa_test_signer().await?;

        let message = b"this is a test message";
        let signature_der = signer.sign_message_ecdsa_with_identity_key(message, true)?;

        let sig_len = signature_der.len();
        assert!(
            (64..=72).contains(&sig_len),
            "ECDSA signature must be between 64 and 72 bytes, got {}",
            sig_len
        );

        let identity_pk = signer.get_identity_public_key()?;
        let msg_hash = Sha256::digest(message);
        let message_for_verify = Secp256k1Message::from_digest_slice(&msg_hash)
            .map_err(|e| SparkSdkError::InvalidInput(format!("Failed to parse message: {e}")))?;

        let parsed_sig = EcdsaSignature::from_der(&signature_der).map_err(|e| {
            SparkSdkError::InvalidInput(format!("Failed to parse DER signature: {e}"))
        })?;

        let pubkey = PublicKey::from_slice(&identity_pk)
            .map_err(|e| SparkSdkError::InvalidInput(format!("Failed to parse public key: {e}")))?;

        let ctx = Secp256k1::verification_only();
        ctx.verify_ecdsa(&message_for_verify, &parsed_sig, &pubkey)
            .map_err(|_e| {
                SparkSdkError::InvalidInput("Signature verification failed".to_string())
            })?;

        Ok(())
    }

    #[tokio::test]
    #[cfg(feature = "self-signing")]
    async fn test_sign_message_ecdsa_with_key() -> Result<(), SparkSdkError> {
        use rand::thread_rng;

        let secp = Secp256k1::new();
        let mut rng = thread_rng();
        let keypair = bitcoin::secp256k1::Keypair::new(&secp, &mut rng);

        let signer = create_ecdsa_test_signer().await?;

        signer.insert_to_keypair_map(
            keypair.public_key().serialize(),
            keypair.secret_key().secret_bytes(),
        )?;

        let message = b"this is a test message";
        let signature_der =
            signer.sign_message_ecdsa_with_key(message, &keypair.public_key().serialize(), true)?;

        assert!(
            (64..=72).contains(&signature_der.len()),
            "ECDSA signature must be between 64 and 72 bytes, got {}",
            signature_der.len()
        );

        Ok(())
    }
}

impl SparkSignerEcies for DefaultSigner {
    fn encrypt_secret_key_with_ecies<T, U>(
        &self,
        receiver_public_key: T,
        pubkey_for_sk_to_encrypt: U,
    ) -> Result<Vec<u8>, SparkSdkError>
    where
        T: AsRef<[u8]>,
        U: AsRef<[u8]>,
    {
        let secret_key = self.get_secret_key_from_pubkey(&pubkey_for_sk_to_encrypt)?;
        let ciphertext = ecies::encrypt(receiver_public_key.as_ref(), &secret_key)
            .map_err(|e| SparkSdkError::InvalidInput(format!("Failed to encrypt: {}", e)))?;

        Ok(ciphertext)
    }

    fn decrypt_secret_key_with_ecies<T>(&self, ciphertext: T) -> Result<Vec<u8>, SparkSdkError>
    where
        T: AsRef<[u8]>,
    {
        let identity_secret_key = self.get_identity_secret_key()?;

        ecies::decrypt(&identity_secret_key, ciphertext.as_ref())
            .map_err(|e| SparkSdkError::InvalidInput(format!("Failed to decrypt: {}", e)))
    }
}

#[cfg(test)]
mod default_signer_ecies_tests {
    use super::*;
    use crate::error::SparkSdkError;
    use bip32::Language;
    use rand::rngs::OsRng;
    use std::sync::Arc;

    type WrappedSigner = Arc<DefaultSigner>;

    const TEST_NETWORK: SparkNetwork = SparkNetwork::Regtest;

    async fn create_ecies_test_signer() -> Result<WrappedSigner, SparkSdkError> {
        let mnemonic = bip32::Mnemonic::random(OsRng, Language::English);
        let master_seed = mnemonic.to_seed("").as_bytes().to_vec();
        let signer = DefaultSigner::from_master_seed(&master_seed, TEST_NETWORK).await?;
        Ok(signer)
    }

    #[tokio::test]
    #[cfg(feature = "self-signing")]
    async fn test_ecies_encrypt_decrypt_round_trip() -> Result<(), SparkSdkError> {
        use crate::signer::traits::secp256k1::KeygenMethod;

        let signer = create_ecies_test_signer().await?;

        let ephemeral_pubkey = signer.new_secp256k1_keypair(KeygenMethod::Random)?;
        let ephemeral_privkey =
            signer.sensitive_expose_secret_key_from_pubkey(&ephemeral_pubkey, false)?;

        let receiver_pubkey = signer.get_identity_public_key()?;

        let ciphertext =
            signer.encrypt_secret_key_with_ecies(&receiver_pubkey, &ephemeral_pubkey)?;

        let decrypted_key = signer.decrypt_secret_key_with_ecies(&ciphertext)?;

        assert_eq!(
            decrypted_key, ephemeral_privkey,
            "Decrypted key did not match the original"
        );

        Ok(())
    }
}

#[async_trait]
impl SparkSignerFrost for DefaultSigner {
    fn new_frost_signing_noncepair(&self) -> Result<FrostSigningCommitments, SparkSdkError> {
        let mut rng = SparkRange;
        let binding_sk = SecretKey::new(&mut rng);
        let hiding_sk = SecretKey::new(&mut rng);

        let binding = FrostNonce::deserialize(&binding_sk.secret_bytes()).unwrap();
        let hiding = FrostNonce::deserialize(&hiding_sk.secret_bytes()).unwrap();

        let nonces = frost_secp256k1_tr::round1::SigningNonces::from_nonces(hiding, binding);
        let commitments = nonces.commitments();

        let nonces_bytes = nonces.serialize().unwrap();
        let commitment_bytes = commitments.serialize().unwrap();

        self.insert_to_noncepair_map(commitment_bytes, nonces_bytes)?;

        Ok(commitments.clone())
    }

    fn sensitive_expose_nonces_from_commitments<T>(
        &self,
        signing_commitments: &T,
    ) -> Result<FrostSigningNonces, SparkSdkError>
    where
        T: AsRef<[u8]>,
    {
        let signing_commitment_hex = hex_encode(signing_commitments.as_ref());
        let signing_nonces = self
            .nonce_commitments
            .read()
            .get(&signing_commitment_hex)
            .cloned()
            .ok_or_else(|| {
                SparkSdkError::InvalidInput("Nonce commitments not found".to_string())
            })?;

        let signing_nonces_bytes = hex_decode(signing_nonces).unwrap();
        let signing_nonces = FrostSigningNonces::deserialize(&signing_nonces_bytes).unwrap();

        Ok(signing_nonces)
    }

    fn sensitive_create_if_not_found_expose_nonces_from_commitments(
        &self,
        signing_commitments: Option<&[u8]>,
    ) -> Result<FrostSigningNonces, SparkSdkError> {
        let commitments = if signing_commitments.is_none() {
            let commitments = self.new_frost_signing_noncepair()?;
            commitments.serialize().unwrap()
        } else {
            signing_commitments.unwrap().to_vec()
        };
        let signing_nonces = self.sensitive_expose_nonces_from_commitments(&commitments)?;
        Ok(signing_nonces)
    }
}
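
// Illustrative sketch (not part of the original module): a round-trip check for
// the FROST noncepair cache above. It mirrors the helper pattern used by the
// other test modules in this file and assumes the `self-signing` feature plus
// the tokio test runtime.
#[cfg(test)]
mod default_signer_frost_noncepair_tests {
    use super::*;
    use bip32::Language;

    const TEST_NETWORK: SparkNetwork = SparkNetwork::Regtest;

    async fn create_frost_test_signer() -> Result<Arc<DefaultSigner>, SparkSdkError> {
        let mnemonic = bip32::Mnemonic::random(SparkRange, Language::English);
        let master_seed = mnemonic.to_seed("").as_bytes().to_vec();
        DefaultSigner::from_master_seed(&master_seed, TEST_NETWORK).await
    }

    #[tokio::test]
    #[cfg(feature = "self-signing")]
    async fn test_noncepair_roundtrip() -> Result<(), SparkSdkError> {
        let signer = create_frost_test_signer().await?;

        // Creating a noncepair stores the nonces under their serialized commitments.
        let commitments = signer.new_frost_signing_noncepair()?;
        let commitment_bytes = commitments.serialize().unwrap();

        // Exposing by the same commitment bytes must return nonces whose
        // commitments match the ones handed out above.
        let nonces = signer.sensitive_expose_nonces_from_commitments(&commitment_bytes)?;
        assert_eq!(nonces.commitments(), &commitments);

        Ok(())
    }
}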

impl SparkSignerFrostSigning for DefaultSigner {
    fn sign_frost(
        &self,
        signing_jobs: Vec<FrostSigningJob>,
    ) -> Result<SignFrostResponse, SparkSdkError> {
        let marshalized_request = SignFrostRequest {
            signing_jobs,
            role: FROST_USER_SIGNING_ROLE,
        };
        sign_frost(&marshalized_request)
            .map_err(|e| SparkSdkError::FrostSigningError(e.to_string()))
    }

    fn aggregate_frost(
        &self,
        request: AggregateFrostRequest,
    ) -> Result<AggregateFrostResponse, SparkSdkError> {
        aggregate_frost(&request).map_err(|e| SparkSdkError::FrostAggregationError(e.to_string()))
    }

    fn sign_created_tree_in_bfs_order(
        &self,
        tx: Transaction,
        vout: u32,
        internal_tree_root: Arc<RwLock<DepositAddressTree>>,
        request_tree_root: spark_protos::spark::CreationNode,
        creation_result_tree_root: spark_protos::spark::CreationResponseNode,
    ) -> Result<(Vec<NodeSignatures>, Vec<Vec<u8>>), SparkSdkError> {
        #[derive(Clone)]
        struct QueueItem {
            parent_tx: Transaction,
            vout: u32,
            internal_node: Arc<RwLock<DepositAddressTree>>,
            creation_node: spark_protos::spark::CreationNode,
            creation_response_node: spark_protos::spark::CreationResponseNode,
        }

        let mut queue = std::collections::VecDeque::new();
        let mut node_signatures = vec![];

        queue.push_back(QueueItem {
            parent_tx: tx,
            vout,
            internal_node: internal_tree_root,
            creation_node: request_tree_root,
            creation_response_node: creation_result_tree_root,
        });

        let mut signing_public_keys = vec![];

        while let Some(current) = queue.pop_front() {
            let node_prevout_index = current.vout as usize;
            let node_signing_input_index = 0;

            let internal_node = current.internal_node.read().clone();
            let creation_node = current.creation_node.clone();
            let node_signing_job = creation_node.node_tx_signing_job.clone().unwrap();
            let serialized_node_transaction = node_signing_job.raw_tx;
            let user_node_commitments = node_signing_job.signing_nonce_commitment.clone().unwrap();
            let (spark_node_signature_shares, spark_node_public_shares, _, spark_node_commitments) =
                get_signature_data_from_signing_result(
                    &current.creation_response_node.node_tx_signing_result,
                )?;

            let signing_job = self.prepare_frost_signing_job(
                internal_node.signing_public_key.clone(),
                Some(serialize_marshalized_frost_commitments(
                    &user_node_commitments,
                )?),
                spark_node_commitments.clone(),
                serialized_node_transaction.clone(),
                node_prevout_index,
                node_signing_input_index,
                &current.parent_tx.clone(),
                vec![],
                internal_node.verification_key.clone().unwrap(),
            )?;

            let node_signature = self.sign_frost(vec![signing_job.clone()]).unwrap();
            let user_node_signature_share = node_signature.results[&signing_job.job_id]
                .signature_share
                .clone();

            let aggregate_response = self
                .aggregate_frost(spark_protos::frost::AggregateFrostRequest {
                    message: signing_job.message,
                    signature_shares: spark_node_signature_shares,
                    public_shares: spark_node_public_shares,
                    verifying_key: signing_job.verifying_key.clone(),
                    commitments: spark_node_commitments,
                    user_commitments: signing_job.user_commitments,
                    user_public_key: signing_job.verifying_key,
                    user_signature_share: user_node_signature_share,
                    adaptor_public_key: vec![],
                })
                .unwrap();

            let mut node_signature = spark_protos::spark::NodeSignatures {
                node_id: current.creation_response_node.node_id.clone(),
                node_tx_signature: aggregate_response.signature,
                ..Default::default()
            };

            if let Some(refund_signing_job) = current.creation_node.refund_tx_signing_job {
                let refund_prevout_index = 0;
                let refund_signing_input_index = 0usize;

                let serialized_refund_transaction = refund_signing_job.raw_tx;
                let user_refund_commitments = refund_signing_job.signing_nonce_commitment.unwrap();
                let (
                    spark_refund_signature_shares,
                    spark_refund_public_shares,
                    _,
                    spark_refund_commitments,
                ) = get_signature_data_from_signing_result(
                    &current.creation_response_node.refund_tx_signing_result,
                )?;

                let refund_signing_job = self.prepare_frost_signing_job(
                    internal_node.signing_public_key,
                    Some(serialize_marshalized_frost_commitments(
                        &user_refund_commitments,
                    )?),
                    spark_refund_commitments.clone(),
                    serialized_refund_transaction,
                    refund_prevout_index,
                    refund_signing_input_index,
                    &bitcoin_tx_from_bytes(&serialized_node_transaction)?,
                    vec![],
                    internal_node.verification_key.clone().unwrap(),
                )?;

                let refund_signature = self.sign_frost(vec![refund_signing_job.clone()])?;
                let user_refund_signature_share = refund_signature.results
                    [&refund_signing_job.job_id]
                    .signature_share
                    .clone();

                let aggregate_response = self
                    .aggregate_frost(spark_protos::frost::AggregateFrostRequest {
                        message: refund_signing_job.message,
                        signature_shares: spark_refund_signature_shares,
                        public_shares: spark_refund_public_shares,
                        verifying_key: refund_signing_job.verifying_key.clone(),
                        commitments: spark_refund_commitments,
                        user_commitments: refund_signing_job.user_commitments,
                        user_public_key: refund_signing_job.verifying_key,
                        user_signature_share: user_refund_signature_share,
                        adaptor_public_key: vec![],
                    })
                    .unwrap();

                node_signature.refund_tx_signature = aggregate_response.signature;
            }
            node_signatures.push(node_signature);

            for (i, child) in current.creation_node.children.into_iter().enumerate() {
                queue.push_back(QueueItem {
                    parent_tx: bitcoin_tx_from_bytes(&serialized_node_transaction)?,
                    vout: i as u32,
                    internal_node: current.internal_node.read().children[i].clone(),
                    creation_node: child,
                    creation_response_node: current.creation_response_node.children[i].clone(),
                });
            }

            signing_public_keys.push(current.internal_node.read().signing_public_key.clone());
        }

        Ok((node_signatures, signing_public_keys))
    }

    fn sign_transfer_refunds(
        &self,
        leaf_data_map: &HashMap<String, LeafRefundSigningData>,
        operator_signing_results: &Vec<LeafRefundTxSigningResult>,
        adaptor_public_key: Vec<u8>,
    ) -> Result<Vec<spark_protos::spark::NodeSignatures>, SparkSdkError> {
        let mut user_signing_jobs = Vec::new();
        let mut job_to_aggregate_request_map = HashMap::new();
        let mut job_to_leaf_map = HashMap::new();

        for operator_result in operator_signing_results {
            let signing_input_index = 0;
            let prevout_to_use = 0;

            let leaf_data = leaf_data_map
                .get(&operator_result.leaf_id)
                .ok_or_else(|| SparkSdkError::InvalidInput("Leaf data not found".into()))?;

            let refund_tx_ = leaf_data.refund_tx.as_ref().unwrap();
            let serialized_refund_tx = serialize_bitcoin_transaction(&refund_tx_)?;

            let (
                spark_refund_signature_shares,
                spark_refund_public_shares,
                _,
                spark_refund_commitments,
            ) = get_signature_data_from_signing_result(&operator_result.refund_tx_signing_result)?;

            let commitment_hiding = leaf_data.commitment.hiding.clone();
            let commitment_binding = leaf_data.commitment.binding.clone();
            let signing_commitments = frost_secp256k1_tr::round1::SigningCommitments::new(
                FrostNonceCommitment::deserialize(&commitment_hiding).unwrap(),
                FrostNonceCommitment::deserialize(&commitment_binding).unwrap(),
            );

            let signing_job = self.prepare_frost_signing_job(
                leaf_data.signing_public_key.clone(),
                Some(signing_commitments.serialize().unwrap()),
                spark_refund_commitments.clone(),
                serialized_refund_tx,
                prevout_to_use,
                signing_input_index,
                &leaf_data.tx,
                adaptor_public_key.clone(),
                operator_result.verifying_key.clone(),
            )?;
            let signing_job_id = signing_job.job_id.clone();
            user_signing_jobs.push(signing_job.clone());

            job_to_leaf_map.insert(signing_job_id.clone(), operator_result.leaf_id.clone());

            let user_public_key = PublicKey::from_slice(&leaf_data.signing_public_key)
                .unwrap()
                .serialize()
                .to_vec();

            job_to_aggregate_request_map.insert(
                signing_job_id.clone(),
                AggregateFrostRequest {
                    message: signing_job.message,
                    signature_shares: spark_refund_signature_shares,
                    public_shares: spark_refund_public_shares,
                    verifying_key: operator_result.verifying_key.clone(),
                    commitments: spark_refund_commitments,
                    user_commitments: signing_job.user_commitments,
                    user_public_key,
                    user_signature_share: vec![],
                    adaptor_public_key: vec![],
                },
            );
        }

        let user_signatures = self.sign_frost(user_signing_jobs)?;

        let mut node_signatures = Vec::new();
        for (job_id, user_signature) in user_signatures.results {
            let mut request = job_to_aggregate_request_map
                .remove(&job_id)
                .ok_or_else(|| SparkSdkError::InvalidInput("Job ID not found".into()))?;

            request.user_signature_share = user_signature.signature_share;

            let response = match self.aggregate_frost(request) {
                Ok(response) => response,
                Err(e) => {
                    return Err(SparkSdkError::InvalidInput(format!(
                        "Failed to aggregate refund: {}",
                        e
                    )));
                }
            };

            node_signatures.push(spark_protos::spark::NodeSignatures {
                node_id: job_to_leaf_map[&job_id].clone(),
                refund_tx_signature: response.signature,
                node_tx_signature: vec![],
            });
        }

        Ok(node_signatures)
    }

    fn sign_for_lightning_swap(
        &self,
        leaves: &Vec<LeafKeyTweak>,
        signing_commitments: &Vec<RequestedSigningCommitments>,
        receiver_identity_pubkey: PublicKey,
    ) -> Result<
        (
            SignFrostResponse,
            Vec<Vec<u8>>,
            Vec<ProtoSigningCommitments>,
        ),
        SparkSdkError,
    > {
        let mut signing_jobs = Vec::new();
        let mut refund_txs = vec![];

        let mut user_commitments = Vec::with_capacity(leaves.len());

        for (i, leaf) in leaves.iter().enumerate() {
            let node_tx = bitcoin_tx_from_bytes(&leaf.leaf.node_tx)?;

            let node_outpoint = bitcoin::OutPoint {
                txid: node_tx.compute_txid(),
                vout: 0,
            };

            let current_refund_tx = bitcoin_tx_from_bytes(&leaf.leaf.refund_tx)?;

            let next_sequence = next_sequence(current_refund_tx.input[0].sequence.0);

            let amount_sats = node_tx.output[0].value;

            let refund_tx = create_refund_tx(
                next_sequence,
                node_outpoint,
                amount_sats,
                &receiver_identity_pubkey,
                self.network.to_bitcoin_network(),
            )?;

            let refund_tx_buf = serialize_bitcoin_transaction(&refund_tx)?;
            refund_txs.push(refund_tx_buf);

            let sighash = sighash_from_tx(&refund_tx, 0, &node_tx.output[0])?;

            let user_commitment = self.new_frost_signing_noncepair()?;
            let user_nonce = self
                .sensitive_expose_nonces_from_commitments(&user_commitment.serialize().unwrap())?;

            let marshalized_frost_nonces = marshal_frost_nonces(&user_nonce)?;
            let marshalized_frost_commitments = marshal_frost_commitments(&user_commitment)?;

            user_commitments.push(marshalized_frost_commitments.clone());

            let signing_secret_key =
                self.sensitive_expose_secret_key_from_pubkey(&leaf.new_signing_public_key, false)?;
            let key_package = create_user_key_package(&signing_secret_key);

            signing_jobs.push(FrostSigningJob {
                job_id: leaf.leaf.id.clone(),
                message: sighash.to_vec(),
                key_package: Some(key_package),
                verifying_key: leaf.leaf.verifying_public_key.clone(),
                nonce: Some(marshalized_frost_nonces),
                user_commitments: Some(marshalized_frost_commitments),
                commitments: signing_commitments[i].signing_nonce_commitments.clone(),
                adaptor_public_key: vec![],
            });
        }

        let signing_results = self.sign_frost(signing_jobs)?;

        Ok((signing_results, refund_txs, user_commitments))
    }
}

#[async_trait]
impl SparkSigner for DefaultSigner {
    type WrappedSigner = Arc<Self>;

    async fn from_mnemonic(
        mnemonic: &str,
        network: SparkNetwork,
    ) -> Result<Self::WrappedSigner, SparkSdkError> {
        let seed = bip32::Mnemonic::new(mnemonic, bip32::Language::English)
            .map_err(|e| SparkSdkError::InvalidInput(e.to_string()))?
            .to_seed("");
        let seed_bytes = seed.as_bytes().to_vec();

        Self::from_master_seed(&seed_bytes, network).await
    }

    #[cfg(feature = "self-signing")]
    async fn from_master_seed(
        master_seed: &[u8],
        network: SparkNetwork,
    ) -> Result<Self::WrappedSigner, SparkSdkError> {
        let derivation_index = IDENTITY_KEYPAIR_INDEX + 1;
        let wrapped_derivation_index = Arc::new(RwLock::new(derivation_index));

        let nonce_commitments = HashbrownMap::new();
        let public_keys_to_secret_keys = HashbrownMap::new();

        let commitments_map = Arc::new(RwLock::new(nonce_commitments));
        let public_keys_map = Arc::new(RwLock::new(public_keys_to_secret_keys));

        Ok(Arc::new(Self {
            master_seed: master_seed.to_vec(),
            derivation_index: wrapped_derivation_index,
            nonce_commitments: commitments_map,
            public_keys_to_secret_keys: public_keys_map,
            network,
        }))
    }

    #[cfg(not(feature = "self-signing"))]
    async fn new_remote(
        signer_url: &str,
        wallet_id: &str,
        user_public_key_hex: &str,
    ) -> Result<Self::WrappedSigner, SparkSdkError> {
        todo!()
    }

    fn derive_signing_key_from_leaf(&self, leaf_id: Uuid) -> Result<Vec<u8>, SparkSdkError> {
        let master_seed = derive_master_key_from_path(&self.master_seed, &self.network)?;
        let child_index = derive_child_index_from_leaf(leaf_id);

        let key = derive_child_key(&master_seed, child_index)?;
        let secret_key = key.private_key().to_bytes().to_vec();

        Ok(secret_key)
    }
}
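
// Illustrative sketch (not part of the original module): `from_mnemonic` is just
// `from_master_seed` applied to the BIP-39 seed of the phrase, so both
// constructors should yield the same identity key for the same mnemonic.
// Assumes the `self-signing` feature and that `bip32::Mnemonic::phrase()`
// exposes the generated phrase.
#[cfg(test)]
mod default_signer_constructor_tests {
    use super::*;
    use bip32::Language;

    #[tokio::test]
    #[cfg(feature = "self-signing")]
    async fn test_from_mnemonic_matches_from_master_seed() -> Result<(), SparkSdkError> {
        let mnemonic = bip32::Mnemonic::random(SparkRange, Language::English);
        let seed = mnemonic.to_seed("").as_bytes().to_vec();

        let signer_from_seed =
            DefaultSigner::from_master_seed(&seed, SparkNetwork::Regtest).await?;
        let signer_from_mnemonic =
            DefaultSigner::from_mnemonic(mnemonic.phrase(), SparkNetwork::Regtest).await?;

        assert_eq!(
            signer_from_seed.get_identity_public_key()?,
            signer_from_mnemonic.get_identity_public_key()?
        );

        Ok(())
    }
}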

const INVALID_SECRET_KEY_ERROR: &str =
    "Could not find secret key in the signer space. Public key used as the index: ";
impl DefaultSigner {
    pub(crate) fn load_master_seed(&self) -> Result<Vec<u8>, SparkSdkError> {
        Ok(self.master_seed.clone())
    }

    pub(crate) fn get_identity_secret_key(&self) -> Result<Vec<u8>, SparkSdkError> {
        let master_seed_bytes = self.load_master_seed()?;
        let master_seed = derive_master_key_from_path(&master_seed_bytes, &self.network)?;
        let identity_key = derive_child_key(&master_seed, IDENTITY_KEYPAIR_INDEX)?;

        Ok(identity_key.private_key().to_bytes().to_vec())
    }

    pub(crate) fn get_secret_key_from_pubkey<T: AsRef<[u8]>>(
        &self,
        public_key: &T,
    ) -> Result<Vec<u8>, SparkSdkError> {
        let public_key_hex = hex_encode(public_key);
        let secret_key_hex = self
            .public_keys_to_secret_keys
            .read()
            .get(&public_key_hex)
            .cloned()
            .ok_or_else(|| {
                SparkSdkError::InvalidInput(format!(
                    "{} {}",
                    INVALID_SECRET_KEY_ERROR, public_key_hex
                ))
            })?;

        Ok(hex_decode(&secret_key_hex).unwrap())
    }

    pub(crate) fn insert_to_keypair_map<T: AsRef<[u8]>, U: AsRef<[u8]>>(
        &self,
        public_key: T,
        secret_key: U,
    ) -> Result<(), SparkSdkError> {
        let public_key_hex = hex_encode(public_key);
        let secret_key_hex = hex_encode(secret_key);
        self.public_keys_to_secret_keys
            .write()
            .insert(public_key_hex, secret_key_hex);

        Ok(())
    }

    pub(crate) fn evict_from_keypair_map<T: AsRef<[u8]>>(
        &self,
        public_key: T,
    ) -> Result<(), SparkSdkError> {
        let public_key_hex = hex_encode(public_key);
        self.public_keys_to_secret_keys
            .write()
            .remove(&public_key_hex);

        Ok(())
    }

    pub(crate) fn insert_to_noncepair_map<T: AsRef<[u8]>, U: AsRef<[u8]>>(
        &self,
        nonce_commitment: T,
        nonce: U,
    ) -> Result<(), SparkSdkError> {
        let nonce_commitment_hex = hex_encode(nonce_commitment);
        let nonce_hex = hex_encode(nonce);

        self.nonce_commitments
            .write()
            .insert(nonce_commitment_hex.clone(), nonce_hex.clone());

        Ok(())
    }

    /// Builds a single-signer FROST signing job for the given transaction input:
    /// exposes the signing key, reuses (or creates) the user nonces for the given
    /// commitments, computes the sighash against the parent transaction's prevout,
    /// and packages everything into a `FrostSigningJob`.
    fn prepare_frost_signing_job<T: AsRef<[u8]>>(
        &self,
        signing_public_key: T,
        user_frost_commitments: Option<Vec<u8>>,
        spark_frost_commitments: HashMap<String, SparkOperatorCommitment>,
        serialized_bitcoin_tx: Vec<u8>,
        prevout_to_use: usize,
        signing_input_index: usize,
        parent_tx: &Transaction,
        adaptor_public_key: Vec<u8>,
        verifying_key: Vec<u8>,
    ) -> Result<FrostSigningJob, SparkSdkError> {
        let job_id = generate_signing_job_id();

        let signing_secret_key =
            self.sensitive_expose_secret_key_from_pubkey(signing_public_key, false)?;
        let frost_nonces = self.sensitive_create_if_not_found_expose_nonces_from_commitments(
            user_frost_commitments.as_deref(),
        )?;
        let marshalized_frost_nonces = marshal_frost_nonces(&frost_nonces)?;

        let marshalized_frost_commitments =
            marshal_frost_commitments(&frost_nonces.commitments())?;

        let key_package = create_user_key_package(&signing_secret_key);
        let transaction = bitcoin_tx_from_bytes(&serialized_bitcoin_tx)?;

        let sighash = sighash_from_tx(
            &transaction,
            signing_input_index,
            &parent_tx.output[prevout_to_use],
        )?;
        let message = sighash.to_vec();

        Ok(FrostSigningJob {
            job_id,
            message,
            key_package: Some(key_package),
            verifying_key,
            nonce: Some(marshalized_frost_nonces),
            commitments: spark_frost_commitments,
            user_commitments: Some(marshalized_frost_commitments),
            adaptor_public_key,
        })
    }
}

fn secret_key_to_public_key<T: AsRef<[u8]>>(secret_key: T) -> Result<Vec<u8>, SparkSdkError> {
    let secp = Secp256k1::new();
    let secret_key = parse_secret_key(&secret_key.as_ref().to_vec())?;
    let public_key = PublicKey::from_secret_key(&secp, &secret_key);

    Ok(public_key.serialize().to_vec())
}
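
// Illustrative sketch (not part of the original module): `secret_key_to_public_key`
// should agree with deriving the public key directly through secp256k1 and always
// return the 33-byte compressed SEC1 encoding.
#[cfg(test)]
mod secret_key_to_public_key_tests {
    use super::*;

    #[test]
    fn test_matches_direct_derivation() {
        let secp = Secp256k1::new();
        let secret_key = SecretKey::new(&mut SparkRange);
        let expected = PublicKey::from_secret_key(&secp, &secret_key)
            .serialize()
            .to_vec();

        let derived = secret_key_to_public_key(secret_key.secret_bytes()).unwrap();
        assert_eq!(derived, expected);
        assert_eq!(derived.len(), 33);
    }
}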

const INVALID_DERIVATION_PATH_ERROR: &str = "failed to derive key for path: ";
fn derive_master_key_from_path(
    master_seed: &[u8],
    network: &SparkNetwork,
) -> Result<XPrv, SparkSdkError> {
    let path = match network {
        SparkNetwork::Mainnet => "m/44'/0'/0'/1/0",
        SparkNetwork::Regtest => "m/44'/1'/0'/1/0",
    };

    let derivation_path: DerivationPath = path
        .parse()
        .map_err(|e| SparkSdkError::InvalidInput(format!("Invalid derivation path: {}", e)))?;

    XPrv::derive_from_path(master_seed, &derivation_path).map_err(|e| {
        SparkSdkError::InvalidInput(format!("{} {}", INVALID_DERIVATION_PATH_ERROR, e))
    })
}

const INVALID_CHILD_INDEX_ERROR: &str =
    "failed to create child index in the derivation path for index: ";
fn derive_child_key(master_seed: &XPrv, child_index: u32) -> Result<XPrv, SparkSdkError> {
    let child_number = ChildNumber::new(child_index, true).map_err(|_| {
        SparkSdkError::InvalidInput(format!("{} {}", INVALID_CHILD_INDEX_ERROR, child_index))
    })?;

    let child_key = master_seed.derive_child(child_number).map_err(|e| {
        SparkSdkError::InvalidInput(format!("{} {}", INVALID_DERIVATION_PATH_ERROR, e))
    })?;
    Ok(child_key)
}
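
// Illustrative sketch (not part of the original module): key derivation is a pure
// function of the master seed, the network path, and the child index, so
// repeating it must give the same key. The 64-byte dummy seed is arbitrary.
#[cfg(test)]
mod key_derivation_tests {
    use super::*;

    #[test]
    fn test_derivation_is_deterministic() {
        let seed = [7u8; 64];

        let xprv_a = derive_master_key_from_path(&seed, &SparkNetwork::Regtest).unwrap();
        let xprv_b = derive_master_key_from_path(&seed, &SparkNetwork::Regtest).unwrap();

        let child_a = derive_child_key(&xprv_a, IDENTITY_KEYPAIR_INDEX).unwrap();
        let child_b = derive_child_key(&xprv_b, IDENTITY_KEYPAIR_INDEX).unwrap();

        assert_eq!(
            child_a.public_key().to_bytes().to_vec(),
            child_b.public_key().to_bytes().to_vec()
        );
    }
}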

/// Builds the single-participant FROST key package used for user-side signing.
pub(crate) fn create_user_key_package(
    signing_secret_key: &[u8],
) -> spark_protos::frost::KeyPackage {
    let user_identifier = FROST_USER_IDENTIFIER;
    let secp = Secp256k1::new();
    let secret_key = SecretKey::from_slice(signing_secret_key).unwrap();
    let public_key = PublicKey::from_secret_key(&secp, &secret_key);

    let mut public_shares = HashMap::new();
    public_shares.insert(user_identifier.to_string(), public_key.serialize().to_vec());

    spark_protos::frost::KeyPackage {
        identifier: user_identifier.to_string(),
        secret_share: signing_secret_key.to_vec(),
        public_shares,
        public_key: public_key.serialize().to_vec(),
        min_signers: FROST_USER_KEY_PACKAGE_MIN_SIGNERS,
    }
}
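
// Illustrative sketch (not part of the original module): the single-participant
// key package built above should carry the user identifier, the raw secret
// share, and the matching public key under that identifier.
#[cfg(test)]
mod create_user_key_package_tests {
    use super::*;

    #[test]
    fn test_key_package_shape() {
        let secret_key = SecretKey::new(&mut SparkRange);
        let package = create_user_key_package(&secret_key.secret_bytes());

        let secp = Secp256k1::new();
        let expected_pubkey = PublicKey::from_secret_key(&secp, &secret_key)
            .serialize()
            .to_vec();

        assert_eq!(package.identifier, FROST_USER_IDENTIFIER.to_string());
        assert_eq!(package.secret_share, secret_key.secret_bytes().to_vec());
        assert_eq!(package.public_key, expected_pubkey);
        assert_eq!(package.min_signers, FROST_USER_KEY_PACKAGE_MIN_SIGNERS);
        assert_eq!(
            package
                .public_shares
                .get(&FROST_USER_IDENTIFIER.to_string()),
            Some(&expected_pubkey)
        );
    }
}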

use spark_protos::common::SigningCommitment as ProtoSigningCommitments;
pub(crate) fn marshal_frost_commitments(
    commitments: &FrostSigningCommitments,
) -> Result<ProtoSigningCommitments, SparkSdkError> {
    let hiding = commitments.hiding().serialize().unwrap();
    let binding = commitments.binding().serialize().unwrap();

    Ok(ProtoSigningCommitments { hiding, binding })
}

pub(crate) fn _unmarshal_frost_commitments(
    commitment: &ProtoSigningCommitments,
) -> Result<FrostSigningCommitments, SparkSdkError> {
    let hiding = commitment.hiding.clone();
    let binding = commitment.binding.clone();

    let hiding_nonce = FrostNonceCommitment::deserialize(&hiding).unwrap();
    let binding_nonce = FrostNonceCommitment::deserialize(&binding).unwrap();

    Ok(FrostSigningCommitments::new(hiding_nonce, binding_nonce))
}

use spark_protos::frost::SigningNonce as ProtoSigningNonce;
pub(crate) fn marshal_frost_nonces(
    nonce: &FrostSigningNonces,
) -> Result<ProtoSigningNonce, SparkSdkError> {
    let hiding = nonce.hiding().serialize();
    let binding = nonce.binding().serialize();

    Ok(ProtoSigningNonce { hiding, binding })
}

pub(crate) fn _unmarshal_frost_nonces(
    nonce: &ProtoSigningNonce,
) -> Result<FrostSigningNonces, SparkSdkError> {
    let hiding_nonce = FrostNonce::deserialize(&nonce.hiding).unwrap();
    let binding_nonce = FrostNonce::deserialize(&nonce.binding).unwrap();

    Ok(FrostSigningNonces::from_nonces(hiding_nonce, binding_nonce))
}

fn serialize_marshalized_frost_commitments(
    commitments: &ProtoSigningCommitments,
) -> Result<Vec<u8>, SparkSdkError> {
    let hiding = commitments.hiding.clone();
    let binding = commitments.binding.clone();

    let prefix_hex = "00230f8ab3";
    let hiding_hex = hex_encode(&hiding);
    let binding_hex = hex_encode(&binding);

    let commitments_hex = format!("{}{}{}", prefix_hex, hiding_hex, binding_hex);
    Ok(hex_decode(&commitments_hex).unwrap())
}
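
// Illustrative sketch (not part of the original module): the serialized user
// commitments are a fixed 5-byte prefix (hex "00230f8ab3") followed by the
// hiding and binding commitment bytes, so the layout can be checked on dummy
// 33-byte commitments.
#[cfg(test)]
mod serialize_marshalized_frost_commitments_tests {
    use super::*;

    #[test]
    fn test_prefix_and_layout() {
        let commitments = ProtoSigningCommitments {
            hiding: vec![0x11; 33],
            binding: vec![0x22; 33],
        };

        let serialized = serialize_marshalized_frost_commitments(&commitments).unwrap();

        assert_eq!(serialized.len(), 5 + 33 + 33);
        assert_eq!(serialized[..5].to_vec(), hex_decode("00230f8ab3").unwrap());
        assert_eq!(&serialized[5..38], commitments.hiding.as_slice());
        assert_eq!(&serialized[38..], commitments.binding.as_slice());
    }
}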

#[cfg(test)]
mod frost_to_proto_conversions_test {
    use super::*;

    #[test]
    fn test_frost_to_proto_conversions() {
        let hiding_sk = SecretKey::new(&mut rand::thread_rng());
        let binding_sk = SecretKey::new(&mut rand::thread_rng());
        let hiding_sk_bytes = hiding_sk.secret_bytes().to_vec();
        let binding_sk_bytes = binding_sk.secret_bytes().to_vec();

        let hiding_nonce = FrostNonce::deserialize(&hiding_sk_bytes).unwrap();
        let binding_nonce = FrostNonce::deserialize(&binding_sk_bytes).unwrap();

        let frost_nonces = FrostSigningNonces::from_nonces(hiding_nonce, binding_nonce);
        let frost_commitments = frost_nonces.commitments();

        let marshalized_nonces = marshal_frost_nonces(&frost_nonces).unwrap();
        let unmarshalized_nonces = _unmarshal_frost_nonces(&marshalized_nonces).unwrap();

        let marshalized_commitments = marshal_frost_commitments(&frost_commitments).unwrap();
        let unmarshalized_commitments =
            _unmarshal_frost_commitments(&marshalized_commitments).unwrap();

        assert_eq!(frost_nonces, unmarshalized_nonces);
        assert_eq!(frost_commitments, &unmarshalized_commitments);
    }
}

fn generate_signing_job_id() -> String {
    Uuid::now_v7().to_string()
}

type SignatureSharesType = HashMap<String, Vec<u8>>;
type PublicSharesType = HashMap<String, Vec<u8>>;
type SigningKeyshareType = Option<SigningKeyshare>;
type SigningCommitmentsType = HashMap<String, SparkOperatorCommitment>;

fn get_signature_data_from_signing_result(
    signing_result: &Option<SigningResult>,
) -> Result<
    (
        SignatureSharesType,
        PublicSharesType,
        SigningKeyshareType,
        SigningCommitmentsType,
    ),
    SparkSdkError,
> {
    let signing_result = signing_result.clone().unwrap();
    let signature_shares = signing_result.signature_shares;
    let public_shares = signing_result.public_keys;
    let signing_keyshare = signing_result.signing_keyshare;
    let commitments = signing_result.signing_nonce_commitments;

    Ok((
        signature_shares,
        public_shares,
        signing_keyshare,
        commitments,
    ))
}

fn create_refund_tx(
    sequence: u32,
    node_outpoint: bitcoin::OutPoint,
    amount_sats: bitcoin::Amount,
    receiving_pubkey: &bitcoin::secp256k1::PublicKey,
    network: bitcoin::Network,
) -> Result<bitcoin::Transaction, SparkSdkError> {
    let mut new_refund_tx = bitcoin::Transaction {
        version: bitcoin::transaction::Version::TWO,
        lock_time: bitcoin::absolute::LockTime::ZERO,
        input: vec![],
        output: vec![],
    };

    new_refund_tx.input.push(bitcoin::TxIn {
        previous_output: node_outpoint,
        script_sig: bitcoin::ScriptBuf::default(),
        sequence: bitcoin::Sequence(sequence),
        witness: bitcoin::Witness::default(),
    });

    let secp: bitcoin::key::Secp256k1<bitcoin::secp256k1::All> =
        bitcoin::secp256k1::Secp256k1::new();
    let addr =
        bitcoin::Address::p2tr(&secp, receiving_pubkey.x_only_public_key().0, None, network);
    let refund_pk_script = addr.script_pubkey();

    new_refund_tx.output.push(bitcoin::TxOut {
        value: amount_sats,
        script_pubkey: refund_pk_script,
    });

    new_refund_tx.output.push(ephemeral_anchor_output());

    Ok(new_refund_tx)
}
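
// Illustrative sketch (not part of the original module): the refund transaction
// should spend exactly the given outpoint with the requested sequence and pay
// the full amount to a P2TR output for the receiver, followed by the ephemeral
// anchor output. Uses `bitcoin::OutPoint::null()` as a placeholder outpoint and
// an arbitrary sequence value.
#[cfg(test)]
mod create_refund_tx_tests {
    use super::*;

    #[test]
    fn test_refund_tx_structure() {
        let secp = bitcoin::secp256k1::Secp256k1::new();
        let (_, receiver_pubkey) = secp.generate_keypair(&mut rand::thread_rng());

        let sequence = 0x4000_0100;
        let outpoint = bitcoin::OutPoint::null();
        let amount = bitcoin::Amount::from_sat(10_000);

        let tx = create_refund_tx(
            sequence,
            outpoint,
            amount,
            &receiver_pubkey,
            bitcoin::Network::Regtest,
        )
        .unwrap();

        assert_eq!(tx.input.len(), 1);
        assert_eq!(tx.input[0].previous_output, outpoint);
        assert_eq!(tx.input[0].sequence, bitcoin::Sequence(sequence));

        assert_eq!(tx.output.len(), 2);
        assert_eq!(tx.output[0].value, amount);
        assert!(tx.output[0].script_pubkey.is_p2tr());
        assert_eq!(tx.output[1], ephemeral_anchor_output());
    }
}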

use sha2::{Digest, Sha256};
fn derive_child_index_from_leaf(leaf_id: Uuid) -> u32 {
    let mut hasher = Sha256::new();
    hasher.update(leaf_id.as_bytes());
    let hash = hasher.finalize();

    // Fold the 32-byte hash into a u32 and keep it below 2^31 so it stays a
    // valid child index for hardened derivation.
    let mut amount: u32 = 0;
    for i in 0..8 {
        let start = i * 4;
        let end = start + 4;
        let chunk = &hash[start..end];
        let value = u32::from_be_bytes(chunk.try_into().unwrap());
        amount = amount.wrapping_add(value) % 0x80000000;
    }

    amount
}
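
// Illustrative sketch (not part of the original module): leaf-derived child
// indices must be deterministic for a given leaf id and stay below 2^31 so they
// remain valid hardened-derivation indices.
#[cfg(test)]
mod derive_child_index_from_leaf_tests {
    use super::*;

    #[test]
    fn test_index_is_deterministic_and_in_range() {
        let leaf_id = Uuid::now_v7();

        let index_a = derive_child_index_from_leaf(leaf_id);
        let index_b = derive_child_index_from_leaf(leaf_id);

        assert_eq!(index_a, index_b);
        assert!(index_a < 0x8000_0000);
    }
}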