// spark_sdk/wallet/config/spark.rs
use crate::constants::spark::connection::DEFAULT_COORDINATOR_INDEX;
use crate::constants::spark::LIGHTSPARK_SSP_ENDPOINT;
use crate::constants::spark::LIGHTSPARK_SSP_IDENTITY_PUBLIC_KEY;
use crate::error::SparkSdkError;
use crate::rpc::connections::connection::SparkConnection;
use crate::rpc::SparkRpcClient;
use crate::SparkNetwork;
use frost_secp256k1_tr::Identifier;
use hashbrown::HashMap;
use parking_lot::RwLock;
use spark_protos::spark::spark_service_client::SparkServiceClient;
use std::sync::Arc;
use tonic::transport::Channel;

use crate::constants::spark::connection::SPARK_REGTEST_OPERATORS;
use crate::constants::spark::SPARK_REGTEST_SIGNING_THRESHOLD;

use crate::rpc::traits::SparkRpcConnection;
/// Wallet-level configuration for talking to a Spark operator group.
///
/// Cloning is cheap for the client cache (`Arc`), but note that
/// `spark_operators` and the SSP fields are deep-copied per clone.
#[derive(Clone)]
pub(crate) struct SparkConfig {
    /// Network this wallet targets (constants in this file wire up regtest).
    pub(crate) network: SparkNetwork,

    /// All known Spark operators; `id` of each entry equals its index here.
    pub(crate) spark_operators: Vec<SparkOperator>,

    /// Endpoint of the Lightspark SSP (taken from `LIGHTSPARK_SSP_ENDPOINT`).
    pub(crate) ssp_endpoint: String,

    /// Decoded (raw bytes) identity public key of the SSP.
    pub(crate) ssp_identity_public_key: Vec<u8>,

    /// Index of the coordinator operator within `spark_operators`.
    pub(crate) coordinator_index: u32,

    /// FROST signing threshold for the operator group.
    pub(crate) threshold: u32,

    /// Lazily populated RPC client cache keyed by operator id; shared across
    /// clones of this config via `Arc<RwLock<…>>`.
    spark_clients: Arc<RwLock<HashMap<u32, SparkRpcClient>>>,
}
44
/// A single Spark operator as seen by this wallet.
#[derive(Debug, Clone)]
pub(crate) struct SparkOperator {
    /// Zero-based operator index; also the key in the config's client cache.
    pub(crate) id: u32,

    /// Hex-encoded serialized FROST identifier (derived from `id + 1`, since
    /// FROST identifiers are 1-based).
    pub(crate) frost_identifier: String,

    /// RPC address (URL string) of the operator.
    pub(crate) address: String,

    /// Decoded (raw bytes) identity public key of the operator.
    pub(crate) identity_public_key: Vec<u8>,
}
61
62impl SparkConfig {
63 pub async fn new(network: SparkNetwork) -> Result<Self, SparkSdkError> {
87 #[allow(unused_mut)]
89 let mut threshold = 0;
90 threshold = SPARK_REGTEST_SIGNING_THRESHOLD;
91
92 let coordinator_index = DEFAULT_COORDINATOR_INDEX;
94
95 let mut spark_operators = vec![];
97
98 let operators = SPARK_REGTEST_OPERATORS;
99 for (i, operator) in operators.iter().enumerate() {
100 let url = operator.0;
102 let pubkey = operator.1;
103 let pubkey = hex::decode(pubkey).expect("Failed to decode Spark operator pubkey");
104
105 let frost_identifier = Identifier::try_from(i as u16 + 1).unwrap();
106
107 spark_operators.push(SparkOperator {
108 id: i as u32,
109 frost_identifier: hex::encode(frost_identifier.serialize().to_vec()),
110 address: url.to_string(),
111 identity_public_key: pubkey,
112 });
113 }
114
115 let mut spark_clients = HashMap::new();
117 for operator in &spark_operators {
118 let operator_url = operator.address.clone();
120 let uri = operator_url.parse()?;
121
122 let spark_rpc_client = SparkConnection::establish_connection(uri).await?;
125 spark_clients.insert(operator.id, spark_rpc_client);
126 }
127
128 let ssp_identity_public_key = hex::decode(LIGHTSPARK_SSP_IDENTITY_PUBLIC_KEY)?;
130
131 println!(
132 "Using SSP public key: {:?}",
133 hex::encode(&ssp_identity_public_key)
134 );
135 let wallet_config = Self {
136 network,
137 spark_operators,
138 coordinator_index,
139 threshold,
140 spark_clients: Arc::new(RwLock::new(spark_clients)),
141 ssp_endpoint: LIGHTSPARK_SSP_ENDPOINT.to_string(),
142 ssp_identity_public_key,
143 };
144
145 Ok(wallet_config)
146 }
147
148 pub(crate) async fn get_spark_connection(
173 &self,
174 operator_id: Option<u32>,
175 ) -> Result<SparkServiceClient<Channel>, SparkSdkError> {
176 let operator_id = operator_id.unwrap_or(self.coordinator_index);
177
178 if !self.spark_clients.read().contains_key(&operator_id) {
180 let spark_operators = self.spark_operators.clone();
181 if operator_id >= spark_operators.len() as u32 {
182 drop(spark_operators);
183 return Err(SparkSdkError::InvalidArgument(format!(
184 "Operator index {} is out of bounds",
185 operator_id
186 )));
187 }
188
189 let operator_url = spark_operators[operator_id as usize].address.clone();
190 let uri = operator_url.parse()?;
191 let spark_rpc_instance = SparkConnection::establish_connection(uri).await?;
192 self.spark_clients
193 .write()
194 .insert(operator_id, spark_rpc_instance);
195 }
196
197 let client = self.spark_clients.read().get(&operator_id).unwrap().clone();
199 let spark_client = client.get_new_spark_service_connection()?;
200
201 Ok(spark_client)
202 }
203}