spark_sdk/wallet/internal_handlers/implementations/
transfer.rs

use crate::common_types::types::frost::FrostSigningCommitments;
use crate::common_types::types::AbsoluteLockTime;
use crate::common_types::types::EcdsaSignature;
use crate::common_types::types::OutPoint;
use crate::common_types::types::PublicKey;
use crate::common_types::types::ScriptBuf;
use crate::common_types::types::Secp256k1;
use crate::common_types::types::Secp256k1Message;
use crate::common_types::types::SecretKey;
use crate::common_types::types::TransactionVersion;
use crate::common_types::types::TxIn;
use crate::common_types::types::Uuid;
use crate::common_types::types::Witness;
use crate::error::SparkSdkError;
use crate::signer::traits::SparkSigner;
use crate::wallet::internal_handlers::traits::transfer::LeafKeyTweak;
use crate::wallet::internal_handlers::traits::transfer::LeafRefundSigningData;
use crate::wallet::internal_handlers::traits::transfer::TransferInternalHandlers;
use crate::wallet::internal_handlers::utils::bitcoin_tx_from_bytes;
use crate::wallet::internal_handlers::utils::parsers::parse_public_key;
use crate::wallet::internal_handlers::utils::serialize_bitcoin_transaction;
use crate::wallet::leaf_manager::LeafNode;
use crate::wallet::utils::sequence::next_sequence;
use crate::SparkSdk;
use spark_cryptography::secret_sharing::secret_sharing::VerifiableSecretShare;
use spark_protos::common::SignatureIntent;
use spark_protos::common::SigningCommitment as ProtoSigningCommitment;
use spark_protos::spark::ClaimLeafKeyTweak;
use spark_protos::spark::ClaimTransferTweakKeysRequest;
use spark_protos::spark::CompleteSendTransferRequest;
use spark_protos::spark::FinalizeNodeSignaturesRequest;
use spark_protos::spark::LeafRefundTxSigningJob;
use spark_protos::spark::SendLeafKeyTweak;
use spark_protos::spark::SigningJob;
use spark_protos::spark::StartSendTransferRequest;
use spark_protos::spark::Transfer;
use spark_protos::spark::TreeNode;
use std::collections::HashMap;

// external crates
use sha256::digest;
use tonic::async_trait;

#[async_trait]
impl<S: SparkSigner + Send + Sync + Clone + 'static> TransferInternalHandlers<S> for SparkSdk<S> {
    /// Top-level transfer execution function, given the leaves to transfer. The function expects the leaves to be in [`LeafKeyTweak`] format.
    ///
    /// # Arguments
    ///
    /// * `leaves` - A vector of [`LeafKeyTweak`] objects representing the leaves to transfer.
    /// * `receiver_identity_pubkey` - The public key of the receiver.
    /// * `expiry_time` - The expiry time of the transfer.
    ///
    /// # Returns
    ///
    /// A [`Transfer`] object representing the transfer. This is an auto-converted protobuf object returned by the Spark API as the final step of the transfer for the sender.
    async fn start_send_transfer(
        &self,
        leaves: &Vec<LeafKeyTweak>,
        receiver_identity_pubkey: Vec<u8>,
        expiry_time: u64,
    ) -> Result<Transfer, SparkSdkError> {
        // Start the transfer and sign the refunds, collecting the aggregated refund signatures.
        let (transfer, refund_signatures) = self
            .send_transfer_sign_refunds(leaves, receiver_identity_pubkey, expiry_time)
            .await?;

        // Tweak the leaf keys and deliver the tweaks to each operator to complete the transfer.
        let transfer = self
            .send_transfer_tweak_key(transfer, leaves, &refund_signatures)
            .await?;

        Ok(transfer)
    }

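    /// Sends the key tweaks for each leaf to every Spark operator and completes
    /// the send side of the transfer. Each operator must return the same
    /// [`Transfer`]; inconsistent responses are rejected.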
    async fn send_transfer_tweak_key(
        &self,
        transfer: Transfer,
        leaves: &Vec<LeafKeyTweak>,
        refund_signature_map: &HashMap<String, Vec<u8>>,
    ) -> Result<Transfer, SparkSdkError> {
        // Prepare the key tweaks for the transfer.
        let key_tweak_input_map =
            self.prepare_send_transfer_key_tweaks(&transfer, leaves, refund_signature_map)?;

        let mut updated_transfer: Option<Transfer> = None;

        for operator in &self.config.spark_config.spark_operators {
            let mut spark_client = self
                .config
                .spark_config
                .get_spark_connection(Some(operator.id))
                .await?;

            let mut request = tonic::Request::new(CompleteSendTransferRequest {
                transfer_id: transfer.id.clone(),
                owner_identity_public_key: self.get_identity_public_key().to_vec(),
                leaves_to_send: key_tweak_input_map[&operator.frost_identifier].clone(),
            });

            self.add_authorization_header_to_request(&mut request, Some(operator.id));

            let response = spark_client
                .complete_send_transfer(request)
                .await?
                .into_inner();

            let response_transfer = response.transfer.ok_or_else(|| {
                SparkSdkError::TransferError("Operator response is missing the transfer".into())
            })?;

            match &updated_transfer {
                None => updated_transfer = Some(response_transfer),
                Some(existing) => {
                    if !self.compare_transfers(existing, &response_transfer) {
                        return Err(SparkSdkError::TransferError(
                            "Inconsistent transfer responses from operators".into(),
                        ));
                    }
                }
            }
        }

        updated_transfer
            .ok_or_else(|| SparkSdkError::TransferError("No operator returned a transfer".into()))
    }

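    /// Starts a send transfer: generates a transfer ID, prepares FROST signing
    /// data and refund transactions for each leaf, calls `start_send_transfer`
    /// on the coordinator, and signs the refunds. Returns the resulting
    /// [`Transfer`] together with a map from leaf ID to refund signature.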
    async fn send_transfer_sign_refunds(
        &self,
        leaves: &Vec<LeafKeyTweak>,
        receiver_identity_pubkey: Vec<u8>,
        expiry_time: u64,
    ) -> Result<(Transfer, HashMap<String, Vec<u8>>), SparkSdkError> {
        // Generate a new transfer ID.
        let transfer_id = Uuid::now_v7().to_string();

        let secp = Secp256k1::new();

        // Prepare signing data for each leaf.
        let mut leaf_data_map = HashMap::new();
        for leaf_key in leaves {
            let commitments = self.signer.new_frost_signing_noncepair()?;
            let node_tx = bitcoin_tx_from_bytes(&leaf_key.leaf.node_tx)?;

            let signing_secret_key =
                bitcoin::secp256k1::SecretKey::from_slice(&leaf_key.old_signing_private_key)?;
            let signing_public_key = signing_secret_key.public_key(&secp).serialize().to_vec();

            let leaf_refund_signing_data = LeafRefundSigningData {
                signing_public_key,
                receiving_pubkey: receiver_identity_pubkey.clone(),
                commitment: frost_commitment_to_proto_commitment(&commitments)?,
                tx: node_tx,
                refund_tx: None,
                vout: leaf_key.leaf.vout,
            };
            leaf_data_map.insert(leaf_key.leaf.id.clone(), leaf_refund_signing_data);
        }

        // Create the signing jobs needed for the transfer.
        let signing_jobs = self.prepare_refund_so_signing_jobs(leaves, &mut leaf_data_map)?;

        // Send the first transfer request as the sender.
        let mut client = self.config.spark_config.get_spark_connection(None).await?;
        let mut request = tonic::Request::new(StartSendTransferRequest {
            transfer_id: transfer_id.clone(),
            leaves_to_send: signing_jobs,
            owner_identity_public_key: self.get_identity_public_key().to_vec(),
            receiver_identity_public_key: receiver_identity_pubkey,
            expiry_time: Some(prost_types::Timestamp {
                seconds: expiry_time as i64,
                nanos: 0,
            }),
        });

        self.add_authorization_header_to_request(&mut request, None);
        let response = client.start_send_transfer(request).await?.into_inner();
        let transfer = response.transfer.ok_or_else(|| {
            SparkSdkError::TransferError("Coordinator response is missing the transfer".into())
        })?;

        // Sign the refunds and aggregate the signatures, combining yours with the Spark
        // operators'. This yields the signature for the refund transaction given to the receiver.
        let signing_results = response.signing_results;
        let signatures =
            self.signer
                .sign_transfer_refunds(&leaf_data_map, &signing_results, vec![])?;

        // Create a map of node IDs to signatures.
        let mut signature_map = HashMap::new();
        for leaf_signature in signatures {
            signature_map.insert(leaf_signature.node_id, leaf_signature.refund_tx_signature);
        }

        Ok((transfer, signature_map))
    }

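    /// Builds the per-operator key tweak payloads for every leaf in the
    /// transfer, grouped by each operator's FROST identifier.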
    fn prepare_send_transfer_key_tweaks(
        &self,
        transfer: &Transfer,
        leaves: &Vec<LeafKeyTweak>,
        refund_signature_map: &HashMap<String, Vec<u8>>,
    ) -> Result<HashMap<String, Vec<SendLeafKeyTweak>>, SparkSdkError> {
        let mut leaves_tweaks_map = HashMap::new();
        for leaf in leaves {
            // Get the refund signature for this leaf.
            let leaf_refund_signature = refund_signature_map
                .get(&leaf.leaf.id)
                .cloned()
                .ok_or_else(|| {
                    SparkSdkError::InvalidInput("Missing refund signature for leaf".into())
                })?;

            // Prepare the key tweak data for this leaf.
            let leaf_tweaks = self.prepare_single_send_transfer_key_tweak(
                &transfer.id,
                leaf,
                &transfer.receiver_identity_public_key,
                &leaf_refund_signature,
            )?;

            // Add the tweaks to the map, grouped by operator identifier.
            for (identifier, leaf_tweak) in leaf_tweaks {
                leaves_tweaks_map
                    .entry(identifier)
                    .or_insert_with(Vec::new)
                    .push(leaf_tweak);
            }
        }

        Ok(leaves_tweaks_map)
    }

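    /// Prepares the key tweak for a single leaf: computes the tweaked public
    /// key, splits its secret with verifiable secret sharing, encrypts the new
    /// signing key to the receiver with ECIES, and signs the payload
    /// (leaf ID || transfer ID || secret cipher) with the identity key.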
    fn prepare_single_send_transfer_key_tweak(
        &self,
        transfer_id: &str,
        leaf: &LeafKeyTweak,
        receiver_pubkey: &Vec<u8>,
        refund_signature: &Vec<u8>,
    ) -> Result<HashMap<String, SendLeafKeyTweak>, SparkSdkError> {
        let secp = Secp256k1::new();
        let old_public_key =
            bitcoin::secp256k1::SecretKey::from_slice(&leaf.old_signing_private_key)?
                .public_key(&secp)
                .serialize()
                .to_vec();
        let tweaked_public_key = self.signer.subtract_secret_keys_given_pubkeys(
            &old_public_key,
            &leaf.new_signing_public_key,
            true,
        )?;

        // Split the secret key that belongs to the tweaked public key.
        let shares = self
            .signer
            .split_from_public_key_with_verifiable_secret_sharing(
                tweaked_public_key,
                self.config.spark_config.threshold as usize,
                self.config.spark_config.spark_operators.len(),
            )?;

        let mut pubkey_shares_tweak = HashMap::new();
        for operator in &self.config.spark_config.spark_operators {
            let share = find_share(&shares, operator.id.into())?;

            let share_scalar =
                SecretKey::from_slice(&share.secret_share.share.to_bytes().to_vec())?;
            let pubkey_tweak = PublicKey::from_secret_key(&Secp256k1::new(), &share_scalar);
            pubkey_shares_tweak.insert(
                operator.frost_identifier.clone(),
                pubkey_tweak.serialize().to_vec(),
            );
        }

        // Generate a signature over the payload.
        // First, encrypt the new signing private key (identified by its public key)
        // to the receiver using ECIES.
        let secret_cipher = self
            .signer
            .encrypt_secret_key_with_ecies(receiver_pubkey, &leaf.new_signing_public_key)?;

        // Build the payload exactly as in the reference Go implementation by concatenating:
        // 1. the leaf ID
        // 2. the transfer ID
        // 3. the encrypted secret (secret_cipher)
        let payload = [
            leaf.leaf.id.as_bytes(), // leaf ID bytes
            transfer_id.as_bytes(),  // transfer ID bytes
            &secret_cipher,          // encrypted secret bytes
        ]
        .concat();

        // Sign the payload hash using ECDSA with the identity private key.
        // Note: the raw hash bytes are used directly; no hex encoding is needed.
        let signature = self
            .signer
            .sign_message_ecdsa_with_identity_key(payload, true)?;

        let mut leaf_tweaks_map = HashMap::new();
        for operator in &self.config.spark_config.spark_operators {
            let share = find_share(&shares, operator.id.into())?;
            leaf_tweaks_map.insert(
                operator.frost_identifier.clone(),
                SendLeafKeyTweak {
                    leaf_id: leaf.leaf.id.clone(),
                    secret_share_tweak: Some(spark_protos::spark::SecretShare {
                        secret_share: share.secret_share.share.to_bytes().to_vec(),
                        proofs: share.proofs.clone(),
                    }),
                    pubkey_shares_tweak: pubkey_shares_tweak.clone(),
                    secret_cipher: secret_cipher.clone(),
                    signature: signature.clone(),
                    refund_signature: refund_signature.to_vec(),
                },
            );
        }

        Ok(leaf_tweaks_map)
    }

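    /// Creates the refund transaction for each leaf and wraps it, together
    /// with the signing nonce commitment, into a [`LeafRefundTxSigningJob`]
    /// for the Spark operators.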
    fn prepare_refund_so_signing_jobs(
        &self,
        leaves: &Vec<LeafKeyTweak>,
        leaf_data_map: &mut HashMap<String, LeafRefundSigningData>,
    ) -> Result<Vec<LeafRefundTxSigningJob>, SparkSdkError> {
        let mut signing_jobs = Vec::new();

        for leaf in leaves {
            let refund_signing_data = leaf_data_map
                .get_mut(&leaf.leaf.id)
                .ok_or_else(|| SparkSdkError::InvalidInput("Leaf data not found".into()))?;

            let signing_pubkey = PublicKey::from_slice(&refund_signing_data.signing_public_key)?;
            let refund_tx =
                self.create_refund_tx(&leaf.leaf, &refund_signing_data.receiving_pubkey)?;
            let refund_bytes = serialize_bitcoin_transaction(&refund_tx)?;

            refund_signing_data.refund_tx = Some(refund_tx);

            let refund_commitment_proto = refund_signing_data.commitment.clone();

            signing_jobs.push(LeafRefundTxSigningJob {
                leaf_id: leaf.leaf.id.clone(),
                refund_tx_signing_job: Some(SigningJob {
                    signing_public_key: signing_pubkey.serialize().to_vec(),
                    raw_tx: refund_bytes,
                    signing_nonce_commitment: Some(refund_commitment_proto),
                }),
            });
        }

        Ok(signing_jobs)
    }

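    /// Builds a new refund transaction spending the leaf's node transaction
    /// output to a P2TR address derived from the receiver's public key, with
    /// the next timelock sequence.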
    fn create_refund_tx(
        &self,
        leaf: &TreeNode,
        receiving_pubkey: &Vec<u8>,
    ) -> Result<bitcoin::Transaction, SparkSdkError> {
        let node_tx = bitcoin_tx_from_bytes(&leaf.node_tx)?;
        let refund_tx = bitcoin_tx_from_bytes(&leaf.refund_tx)?;

        let mut new_refund_tx = bitcoin::Transaction {
            version: TransactionVersion::TWO,
            lock_time: AbsoluteLockTime::ZERO,
            input: vec![],
            output: vec![],
        };

        let sequence = next_sequence(refund_tx.input[0].sequence.0);
        let sequence = bitcoin::Sequence(sequence);

        new_refund_tx.input.push(TxIn {
            previous_output: OutPoint {
                txid: node_tx.compute_txid(),
                vout: 0,
            },
            script_sig: ScriptBuf::default(),
            sequence,
            witness: Witness::default(),
        });

        let secp = Secp256k1::new();
        let pubkey = PublicKey::from_slice(receiving_pubkey)?;
        let addr = bitcoin::Address::p2tr(
            &secp,
            pubkey.x_only_public_key().0,
            None,
            self.config.spark_config.network.to_bitcoin_network(),
        );

        new_refund_tx.output.push(bitcoin::TxOut {
            value: node_tx.output[0].value,
            script_pubkey: addr.script_pubkey(),
        });

        Ok(new_refund_tx)
    }

    // Helper methods
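    /// Checks that two [`Transfer`] responses agree on ID, receiver, status,
    /// value, expiry, and leaf count.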
    fn compare_transfers(&self, t1: &Transfer, t2: &Transfer) -> bool {
        t1.id == t2.id
            && t1.receiver_identity_public_key == t2.receiver_identity_public_key
            && t1.status == t2.status
            && t1.total_value == t2.total_value
            && t1.expiry_time.as_ref().map(|t| t.seconds)
                == t2.expiry_time.as_ref().map(|t| t.seconds)
            && t1.leaves.len() == t2.leaves.len()
    }

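    /// Claims an incoming transfer end-to-end: tweaks the leaf keys, signs the
    /// refunds, finalizes the node signatures, and stores the claimed leaves
    /// in the leaf manager.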
    async fn claim_finalize_incoming_transfer(
        &self,
        transfer: &Transfer,
        leaves: &Vec<LeafKeyTweak>,
    ) -> Result<(), SparkSdkError> {
        // First tweak the keys.
        self.claim_transfer_tweak_keys(transfer, leaves).await?;

        // Then sign the refunds.
        let signatures = self.claim_transfer_sign_refunds(transfer, leaves).await?;

        // Finally, finalize the transfer.
        self.finalize_transfer(&signatures).await?;

        // Store the claimed leaves in the leaf manager.
        let mut leaf_nodes = vec![];
        for (leaf, _) in leaves.iter().zip(signatures.iter()) {
            let leaf_node = LeafNode::new(
                leaf.leaf.id.clone(),
                leaf.leaf.tree_id.clone(),
                leaf.leaf.value,
                leaf.leaf.parent_node_id.clone(),
                leaf.leaf.vout,
                leaf.leaf.verifying_public_key.clone(),
                leaf.new_signing_public_key.clone(),
                leaf.leaf.node_tx.clone(),
                leaf.leaf.refund_tx.clone(),
                None,
                vec![],
                vec![],
            );

            leaf_nodes.push(leaf_node);
        }

        self.leaf_manager.insert_leaves_in_batch(leaf_nodes)?;

        Ok(())
    }

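    /// Sends the claim-side key tweaks for the received leaves to every Spark
    /// operator.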
    async fn claim_transfer_tweak_keys(
        &self,
        transfer: &Transfer,
        leaves: &Vec<LeafKeyTweak>,
    ) -> Result<(), SparkSdkError> {
        let leaves_tweaks_map = self.prepare_claim_leaves_key_tweaks(leaves)?;

        for operator in &self.config.spark_config.spark_operators {
            let mut spark_client = self
                .config
                .spark_config
                .get_spark_connection(Some(operator.id))
                .await?;

            let request = ClaimTransferTweakKeysRequest {
                transfer_id: transfer.id.clone(),
                owner_identity_public_key: self.get_identity_public_key().to_vec(),
                leaves_to_receive: leaves_tweaks_map[&operator.frost_identifier].clone(),
            };
            let mut tonic_request = tonic::Request::new(request);
            self.add_authorization_header_to_request(&mut tonic_request, Some(operator.id));

            spark_client.claim_transfer_tweak_keys(tonic_request).await?;
        }

        Ok(())
    }

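    /// Builds the per-operator claim key tweaks for every received leaf,
    /// grouped by each operator's FROST identifier.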
    fn prepare_claim_leaves_key_tweaks(
        &self,
        leaves: &Vec<LeafKeyTweak>,
    ) -> Result<HashMap<String, Vec<ClaimLeafKeyTweak>>, SparkSdkError> {
        let mut leaves_tweaks_map = HashMap::new();

        for leaf in leaves {
            let leaf_tweaks = self.prepare_claim_leaf_key_tweaks(leaf)?;

            for (identifier, leaf_tweak) in leaf_tweaks {
                leaves_tweaks_map
                    .entry(identifier)
                    .or_insert_with(Vec::new)
                    .push(leaf_tweak);
            }
        }

        Ok(leaves_tweaks_map)
    }

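    /// Prepares the claim key tweak for a single leaf: computes the tweaked
    /// public key and splits its secret with verifiable secret sharing, one
    /// share per operator.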
    fn prepare_claim_leaf_key_tweaks(
        &self,
        leaf: &LeafKeyTweak,
    ) -> Result<HashMap<String, ClaimLeafKeyTweak>, SparkSdkError> {
        let secp = Secp256k1::new();
        let old_public_key =
            bitcoin::secp256k1::SecretKey::from_slice(&leaf.old_signing_private_key)?
                .public_key(&secp)
                .serialize()
                .to_vec();
        let tweaked_public_key = self.signer.subtract_secret_keys_given_pubkeys(
            &old_public_key,
            &leaf.new_signing_public_key,
            true,
        )?;

        let shares = self
            .signer
            .split_from_public_key_with_verifiable_secret_sharing(
                tweaked_public_key,
                self.config.spark_config.threshold as usize,
                self.config.spark_config.spark_operators.len(),
            )?;

        let mut pubkey_shares_tweak = HashMap::new();
        for operator in &self.config.spark_config.spark_operators {
            let share = find_share(&shares, operator.id.into())?;

            // Derive each operator's public key tweak from its secret share.
            let share_scalar =
                SecretKey::from_slice(&share.secret_share.share.to_bytes().to_vec())?;
            let pubkey_tweak = PublicKey::from_secret_key(&Secp256k1::new(), &share_scalar);
            pubkey_shares_tweak.insert(
                operator.frost_identifier.clone(),
                pubkey_tweak.serialize().to_vec(),
            );
        }

        let mut leaf_tweaks_map = HashMap::new();
        for operator in &self.config.spark_config.spark_operators {
            let share = find_share(&shares, operator.id.into())?;

            leaf_tweaks_map.insert(
                operator.frost_identifier.clone(),
                ClaimLeafKeyTweak {
                    leaf_id: leaf.leaf.id.clone(),
                    secret_share_tweak: Some(spark_protos::spark::SecretShare {
                        secret_share: share.secret_share.share.to_bytes().to_vec(),
                        proofs: share.proofs.clone(),
                    }),
                    pubkey_shares_tweak: pubkey_shares_tweak.clone(),
                },
            );
        }

        Ok(leaf_tweaks_map)
    }

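    /// Signs the refund transactions for the claimed leaves: prepares signing
    /// data with the new keys, requests signing from the coordinator, and
    /// aggregates the resulting signatures.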
    async fn claim_transfer_sign_refunds(
        &self,
        transfer: &Transfer,
        leaf_keys: &Vec<LeafKeyTweak>,
    ) -> Result<Vec<spark_protos::spark::NodeSignatures>, SparkSdkError> {
        // Create a map to store refund signing data for each leaf.
        let mut leaf_data_map = HashMap::new();

        for leaf_key in leaf_keys {
            // For each leaf, build the signing data using the new signing public key.
            let new_public_key = leaf_key.new_signing_public_key.clone();

            let commitments = self.signer.new_frost_signing_noncepair()?;

            // Deserialize the node transaction from raw bytes.
            let tx = bitcoin::consensus::deserialize(&leaf_key.leaf.node_tx).map_err(|e| {
                SparkSdkError::InvalidInput(format!("Failed to deserialize transaction: {e}"))
            })?;

            // Store all the data needed for signing.
            leaf_data_map.insert(
                leaf_key.leaf.id.clone(),
                LeafRefundSigningData {
                    signing_public_key: new_public_key.to_vec(),
                    receiving_pubkey: new_public_key.to_vec(),
                    commitment: frost_commitment_to_proto_commitment(&commitments)?,
                    tx,
                    refund_tx: None,
                    vout: leaf_key.leaf.vout,
                },
            );
        }

        // Prepare the signing jobs for each leaf.
        let signing_jobs = self.prepare_refund_so_signing_jobs(leaf_keys, &mut leaf_data_map)?;

        // Request signing of the refunds.
        let mut spark_client = self.config.spark_config.get_spark_connection(None).await?;
        let request = spark_protos::spark::ClaimTransferSignRefundsRequest {
            transfer_id: transfer.id.clone(),
            owner_identity_public_key: self.get_identity_public_key().to_vec(),
            signing_jobs,
        };
        let mut tonic_request = tonic::Request::new(request);
        self.add_authorization_header_to_request(&mut tonic_request, None);

        let response = spark_client
            .claim_transfer_sign_refunds(tonic_request)
            .await?
            .into_inner();

        // Process the signing results and generate the final signatures.
        self.signer
            .sign_transfer_refunds(&leaf_data_map, &response.signing_results, vec![])
    }

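    /// Submits the aggregated node signatures to finalize the transfer.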
    async fn finalize_transfer(
        &self,
        signatures: &[spark_protos::spark::NodeSignatures],
    ) -> Result<(), SparkSdkError> {
        let mut spark_client = self.config.spark_config.get_spark_connection(None).await?;
        let request = FinalizeNodeSignaturesRequest {
            intent: SignatureIntent::Transfer as i32,
            node_signatures: signatures.to_vec(),
        };
        let mut tonic_request = tonic::Request::new(request);
        self.add_authorization_header_to_request(&mut tonic_request, None);

        spark_client.finalize_node_signatures(tonic_request).await?;

        Ok(())
    }

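    /// Verifies a pending incoming transfer: checks the sender's ECDSA
    /// signature over each leaf's payload and decrypts the leaf secret with
    /// the wallet's identity key. Returns a map from leaf ID to the decrypted
    /// signing private key.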
    async fn verify_pending_transfer(
        &self,
        transfer: &spark_protos::spark::Transfer,
    ) -> Result<HashMap<String, Vec<u8>>, SparkSdkError> {
        // Map from leaf ID to the decrypted leaf private key.
        let mut leaf_privkey_map = HashMap::new();

        // 1) Parse the sender's public key.
        let secp = Secp256k1::new();
        let sender_pubkey = parse_public_key(&transfer.sender_identity_public_key)?;

        // 2) For each leaf, verify the signature, then decrypt the secret.
        for leaf in &transfer.leaves {
            // Parse the signature from DER.
            let signature = EcdsaSignature::from_der(&leaf.signature).map_err(|e| {
                SparkSdkError::InvalidInput(format!("Failed to parse DER signature: {e}"))
            })?;

            // Build the message to verify: leaf ID || transfer ID || secret cipher.
            let mut payload = leaf
                .leaf
                .as_ref()
                .map(|l| l.id.clone())
                .unwrap_or_default()
                .into_bytes();
            payload.extend_from_slice(transfer.id.as_bytes());
            payload.extend_from_slice(&leaf.secret_cipher);

            // Hash the payload.
            let payload_hash_hex = digest(&payload);
            let payload_hash_bytes = hex::decode(payload_hash_hex).map_err(|e| {
                SparkSdkError::InvalidInput(format!("Failed to decode hex payload hash: {e}"))
            })?;

            // Build the secp256k1 message and verify the ECDSA signature.
            let msg = Secp256k1Message::from_digest_slice(&payload_hash_bytes).map_err(|e| {
                SparkSdkError::InvalidInput(format!(
                    "Failed to create message for signature verify: {e}"
                ))
            })?;

            secp.verify_ecdsa(&msg, &signature, &sender_pubkey)
                .map_err(|e| {
                    SparkSdkError::InvalidInput(format!("Failed to verify signature: {e}"))
                })?;

            // Decrypt the secret cipher with our identity key via ECIES.
            let leaf_secret = self
                .signer
                .decrypt_secret_key_with_ecies(&leaf.secret_cipher)?;

            // Record the decrypted leaf secret (private key) in the map.
            if let Some(leaf_node) = &leaf.leaf {
                leaf_privkey_map.insert(leaf_node.id.clone(), leaf_secret);
            }
        }

        Ok(leaf_privkey_map)
    }
}

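/// Finds the verifiable secret share whose index matches the given operator
/// ID (share indices are 1-based, so the target index is `operator_id + 1`).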
fn find_share(
    shares: &[VerifiableSecretShare],
    operator_id: u64,
) -> Result<VerifiableSecretShare, SparkSdkError> {
    let target_index = k256::Scalar::from(operator_id + 1);
    shares
        .iter()
        .find(|s| s.secret_share.index == target_index)
        .cloned()
        .ok_or_else(|| SparkSdkError::InvalidInput("Share not found".into()))
}

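/// Converts FROST signing commitments into the protobuf commitment format.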
fn frost_commitment_to_proto_commitment(
    commitments: &FrostSigningCommitments,
) -> Result<ProtoSigningCommitment, SparkSdkError> {
    let hiding = commitments.hiding().serialize().map_err(|e| {
        SparkSdkError::InvalidInput(format!("Failed to serialize hiding commitment: {e}"))
    })?;
    let binding = commitments.binding().serialize().map_err(|e| {
        SparkSdkError::InvalidInput(format!("Failed to serialize binding commitment: {e}"))
    })?;

    Ok(ProtoSigningCommitment { hiding, binding })
}