spark_sdk/wallet/config/

use crate::constants::spark::connection::DEFAULT_COORDINATOR_INDEX;
use crate::constants::spark::LIGHTSPARK_SSP_ENDPOINT;
use crate::constants::spark::LIGHTSPARK_SSP_IDENTITY_PUBLIC_KEY;
use crate::error::SparkSdkError;
use crate::rpc::connections::connection::SparkConnection;
use crate::rpc::SparkRpcClient;
use crate::SparkNetwork;
use frost_secp256k1_tr::Identifier;
use hashbrown::HashMap;
use parking_lot::RwLock;
use spark_protos::spark::spark_service_client::SparkServiceClient;
use std::sync::Arc;
use tonic::transport::Channel;

use crate::constants::spark::connection::SPARK_REGTEST_OPERATORS;
use crate::constants::spark::SPARK_REGTEST_SIGNING_THRESHOLD;

use crate::rpc::traits::SparkRpcConnection;

/// Configuration for the Spark wallet
#[derive(Clone)]
pub(crate) struct SparkConfig {
    /// Network to use for the wallet (mainnet, testnet, etc.)
    pub(crate) network: SparkNetwork,

    /// List of signing operator configurations, indexed by operator ID
    pub(crate) spark_operators: Vec<SparkOperator>,

    /// SSP endpoint
    pub(crate) ssp_endpoint: String,

    /// SSP identity public key
    pub(crate) ssp_identity_public_key: Vec<u8>,

    /// Index of the coordinator signing operator
    pub(crate) coordinator_index: u32,

    /// The threshold constant used for signing
    pub(crate) threshold: u32,

    /// gRPC clients used for Spark operator RPC calls, keyed by operator ID
    spark_clients: Arc<RwLock<HashMap<u32, SparkRpcClient>>>,
}

/// Configuration for a signing operator
#[derive(Debug, Clone)]
pub(crate) struct SparkOperator {
    /// The index of the signing operator.
    pub(crate) id: u32,

    /// The FROST identifier of the signing operator: the operator index + 1,
    /// serialized as a 32-byte big-endian hex string.
    /// Used as the Shamir secret share identifier in DKG key shares.
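    /// For example, the operator at index 0 has FROST identifier 1, which
    /// serializes to 63 zeros followed by "1" (64 hex characters).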
    pub(crate) frost_identifier: String,

    /// Address of the signing operator
    pub(crate) address: String,

    /// Public key of the signing operator
    pub(crate) identity_public_key: Vec<u8>,
}

impl SparkConfig {
    /// Creates a new wallet configuration for the specified network.
    ///
    /// This function:
    /// 1. Builds the list of Spark operators from the `SPARK_REGTEST_OPERATORS` constant
    /// 2. Sets up gRPC connections to each operator
    /// 3. Initializes the wallet configuration with default values
    ///
    /// Note: the operator set and signing threshold are currently taken from the
    /// regtest constants regardless of the `network` argument.
    ///
    /// # Arguments
    ///
    /// * `network` - The Spark network to use (mainnet, testnet, regtest)
    ///
    /// # Returns
    ///
    /// Returns a Result containing either:
    /// * The initialized SparkConfig struct on success
    /// * A [`SparkSdkError`] on failure (e.g. connection errors, invalid URLs)
    ///
    /// # Errors
    ///
    /// This function will return an error if:
    /// * gRPC connections to the operators cannot be established
    /// * An operator URL fails to parse
    /// * An operator or SSP public key fails to hex-decode
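    ///
    /// # Example
    ///
    /// A minimal usage sketch (hypothetical caller; assumes a tokio runtime,
    /// reachable regtest operators, and a `Regtest` variant on [`SparkNetwork`]):
    ///
    /// ```ignore
    /// let config = SparkConfig::new(SparkNetwork::Regtest).await?;
    /// let client = config.get_spark_connection(None).await?;
    /// ```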
    pub async fn new(network: SparkNetwork) -> Result<Self, SparkSdkError> {
        // set the threshold (currently fixed to the regtest constant)
        let threshold = SPARK_REGTEST_SIGNING_THRESHOLD;

        // set the default coordinator index
        let coordinator_index = DEFAULT_COORDINATOR_INDEX;

        // set the operators
        let mut spark_operators = vec![];

        let operators = SPARK_REGTEST_OPERATORS;
        for (i, operator) in operators.iter().enumerate() {
            // TODO: Add environment
            let url = operator.0;
            let pubkey = hex::decode(operator.1)?;

            // i + 1 is always nonzero, so the conversion cannot fail
            let frost_identifier = Identifier::try_from(i as u16 + 1)
                .expect("a nonzero index is a valid FROST identifier");

            spark_operators.push(SparkOperator {
                id: i as u32,
                frost_identifier: hex::encode(frost_identifier.serialize()),
                address: url.to_string(),
                identity_public_key: pubkey,
            });
        }

        // set up the connection pool for the operators
        let mut spark_clients = HashMap::new();
        for operator in &spark_operators {
            // parse the URI
            let operator_url = operator.address.clone();
            let uri = operator_url.parse()?;

            // establish the secure connection;
            // since this uses rustls, self-signed certificates will fail
            let spark_rpc_client = SparkConnection::establish_connection(uri).await?;
            spark_clients.insert(operator.id, spark_rpc_client);
        }

        // create the wallet configuration
        let ssp_identity_public_key = hex::decode(LIGHTSPARK_SSP_IDENTITY_PUBLIC_KEY)?;

        println!(
            "Using SSP public key: {}",
            hex::encode(&ssp_identity_public_key)
        );
        let wallet_config = Self {
            network,
            spark_operators,
            coordinator_index,
            threshold,
            spark_clients: Arc::new(RwLock::new(spark_clients)),
            ssp_endpoint: LIGHTSPARK_SSP_ENDPOINT.to_string(),
            ssp_identity_public_key,
        };

        Ok(wallet_config)
    }

    /// Gets a connection to a Spark operator service.
    ///
    /// This function manages connections to Spark operators, creating new connections as needed
    /// and reusing existing ones. It can connect either to a specific operator or, by default,
    /// to the coordinator.
    ///
    /// # Arguments
    ///
    /// * `operator_id` - Optional ID of the specific operator to connect to. If None, connects to
    ///                   the default coordinator.
    ///
    /// # Returns
    ///
    /// * [`Result<SparkServiceClient<Channel>, SparkSdkError>`] - A client for the Spark service on success,
    ///   or an error if the connection fails
    ///
    /// # Errors
    ///
    /// Returns [`SparkSdkError::InvalidArgument`] if:
    /// * The `operator_id` is out of bounds for the available operators
    ///
    /// May also return errors from:
    /// * Channel creation/connection
    /// * URI parsing
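    ///
    /// # Example
    ///
    /// A minimal sketch (hypothetical caller; assumes an already-initialized
    /// `config: SparkConfig`):
    ///
    /// ```ignore
    /// // `None` falls back to the coordinator operator.
    /// let coordinator_client = config.get_spark_connection(None).await?;
    /// // Target a specific operator by its index.
    /// let operator_client = config.get_spark_connection(Some(1)).await?;
    /// ```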
    pub(crate) async fn get_spark_connection(
        &self,
        operator_id: Option<u32>,
    ) -> Result<SparkServiceClient<Channel>, SparkSdkError> {
        let operator_id = operator_id.unwrap_or(self.coordinator_index);

        // if no client exists yet, create the connection for the operator
        if !self.spark_clients.read().contains_key(&operator_id) {
            if operator_id >= self.spark_operators.len() as u32 {
                return Err(SparkSdkError::InvalidArgument(format!(
                    "Operator index {} is out of bounds",
                    operator_id
                )));
            }

            // two tasks may race past the contains_key check; the later insert
            // simply replaces the earlier client, which is harmless
            let operator_url = self.spark_operators[operator_id as usize].address.clone();
            let uri = operator_url.parse()?;
            let spark_rpc_instance = SparkConnection::establish_connection(uri).await?;
            self.spark_clients
                .write()
                .insert(operator_id, spark_rpc_instance);
        }

        // get the cached client; the entry is guaranteed to exist at this point
        let client = self.spark_clients.read().get(&operator_id).unwrap().clone();
        let spark_client = client.get_new_spark_service_connection()?;

        Ok(spark_client)
    }
}