Check device lock on connecting to peer
benthecarman committed Oct 31, 2023
1 parent a72a87c commit c264604
Showing 4 changed files with 35 additions and 1 deletion.
5 changes: 4 additions & 1 deletion mutiny-core/src/node.rs
@@ -700,6 +700,7 @@ impl<S: MutinyStorage> Node<S> {
             #[cfg(target_arch = "wasm32")]
             &self.websocket_proxy_addr,
             &peer_connection_info,
+            &self.persister.storage,
             self.logger.clone(),
             self.peer_manager.clone(),
             self.fee_estimator.clone(),
@@ -1648,7 +1649,7 @@ pub(crate) fn scoring_params() -> ProbabilisticScoringFeeParameters {

 #[allow(clippy::too_many_arguments)]
 async fn start_reconnection_handling<S: MutinyStorage>(
-    storage: &impl MutinyStorage,
+    storage: &S,
     node_pubkey: PublicKey,
     #[cfg(target_arch = "wasm32")] websocket_proxy_addr: String,
     peer_man: Arc<dyn PeerManager>,
@@ -1699,6 +1700,7 @@ async fn start_reconnection_handling<S: MutinyStorage>(
                 #[cfg(target_arch = "wasm32")]
                 &websocket_proxy_addr_copy_proxy,
                 &PubkeyConnectionInfo::new(lsp.connection_string.as_str()).unwrap(),
+                &storage_copy,
                 proxy_logger.clone(),
                 peer_man_proxy.clone(),
                 proxy_fee_estimator.clone(),
@@ -1801,6 +1803,7 @@ async fn start_reconnection_handling<S: MutinyStorage>(
                 #[cfg(target_arch = "wasm32")]
                 &websocket_proxy_addr,
                 &peer_connection_info,
+                &connect_storage,
                 connect_logger.clone(),
                 connect_peer_man.clone(),
                 connect_fee_estimator.clone(),
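Note: the one deletion in this commit is the `storage: &impl MutinyStorage` parameter, replaced by `storage: &S`. A standalone sketch of the practical difference, using stand-in names (`Storage`, `Estimator`, and `Mem` are illustrative, not mutiny-core types): `impl Trait` in argument position introduces its own anonymous type parameter, so a `&impl MutinyStorage` argument could not be forwarded to code that expects the same `S` the rest of the function is parameterized over, which is presumably why the signature changes now that the storage is handed on to `connect_peer_if_necessary` (next file).

// Illustrative only: stand-in types, not mutiny-core code.
trait Storage {}

struct Mem;
impl Storage for Mem {}

struct Estimator<S: Storage> {
    _storage: S,
}

// Something downstream (like connect_peer_if_necessary in this commit) that
// requires the storage and the estimator to share one type parameter.
fn needs_same_s<S: Storage>(_estimator: &Estimator<S>, _storage: &S) {}

// Before: `storage` has its own hidden type parameter, independent of `S`.
fn with_impl_trait<S: Storage>(estimator: &Estimator<S>, storage: &impl Storage) {
    let _ = (estimator, storage);
    // needs_same_s(estimator, storage); // does not compile: `storage` is not `&S`
}

// After: `storage` is guaranteed to be the same `S` and can be forwarded.
fn with_s<S: Storage>(estimator: &Estimator<S>, storage: &S) {
    needs_same_s(estimator, storage);
}

fn main() {
    let estimator = Estimator { _storage: Mem };
    with_impl_trait(&estimator, &Mem);
    with_s(&estimator, &Mem);
}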
10 changes: 10 additions & 0 deletions mutiny-core/src/peermanager.rs
@@ -297,6 +297,7 @@ impl<S: MutinyStorage> RoutingMessageHandler for GossipMessageHandler<S> {
 pub(crate) async fn connect_peer_if_necessary<S: MutinyStorage>(
     #[cfg(target_arch = "wasm32")] websocket_proxy_addr: &str,
     peer_connection_info: &PubkeyConnectionInfo,
+    storage: &S,
     logger: Arc<MutinyLogger>,
     peer_manager: Arc<dyn PeerManager>,
     fee_estimator: Arc<MutinyFeeEstimator<S>>,
@@ -308,6 +309,15 @@ pub(crate) async fn connect_peer_if_necessary<S: MutinyStorage>(
     {
         Ok(())
     } else {
+        // make sure we have the device lock before connecting
+        // otherwise we could cause force closes
+        if let Some(lock) = storage.fetch_device_lock().await? {
+            let id = storage.get_device_id()?;
+            if lock.is_locked(&id) {
+                return Err(MutinyError::AlreadyRunning);
+            }
+        }
+
         // first check to see if the fee rate is mostly up to date
         // if not, we need to have updated fees or force closures
         // could occur due to UpdateFee message conflicts.
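Note: the new guard calls `DeviceLock::is_locked`, which is not part of this diff. A minimal sketch of the semantics it relies on, assuming the `DeviceLock { time, device }` shape visible in the storage.rs hunk below and an illustrative 60-second staleness window (the real struct and timing live elsewhere in mutiny-core and may differ): a lock only blocks the connection when a different device id refreshed it recently.

use std::time::{SystemTime, UNIX_EPOCH};

// Field names mirror the `DeviceLock { time, device }` constructor in the
// storage.rs hunk; everything else here is an assumption for illustration.
#[derive(Clone, Debug)]
pub struct DeviceLock {
    pub time: u32,      // unix seconds when the lock was last refreshed
    pub device: String, // id of the device that wrote the lock
}

impl DeviceLock {
    // True when another device refreshed the lock recently enough to still be
    // considered the active owner. The 60-second window is illustrative, not
    // the value mutiny-core uses.
    pub fn is_locked(&self, id: &str) -> bool {
        let now = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .expect("system clock before unix epoch")
            .as_secs();
        let age = now.saturating_sub(self.time as u64);
        self.device != id && age < 60
    }
}

fn main() {
    let now = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .unwrap()
        .as_secs() as u32;
    let lock = DeviceLock { time: now, device: "phone".to_string() };
    assert!(lock.is_locked("laptop")); // fresh lock held by another device
    assert!(!lock.is_locked("phone")); // our own lock never blocks us
}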
10 changes: 10 additions & 0 deletions mutiny-core/src/storage.rs
@@ -397,6 +397,8 @@ pub trait MutinyStorage: Clone + Sized + 'static {
         let lock = DeviceLock { time, device };
         self.set_data(DEVICE_LOCK_KEY, lock, Some(time))
     }
+
+    async fn fetch_device_lock(&self) -> Result<Option<DeviceLock>, MutinyError>;
 }

 #[derive(Clone)]
@@ -551,6 +553,10 @@ impl MutinyStorage for MemoryStorage {
     async fn clear() -> Result<(), MutinyError> {
         Ok(())
     }
+
+    async fn fetch_device_lock(&self) -> Result<Option<DeviceLock>, MutinyError> {
+        self.get_device_lock()
+    }
 }

 // Dummy implementation for testing or if people want to ignore persistence
@@ -614,6 +620,10 @@ impl MutinyStorage for () {
     async fn clear() -> Result<(), MutinyError> {
         Ok(())
     }
+
+    async fn fetch_device_lock(&self) -> Result<Option<DeviceLock>, MutinyError> {
+        self.get_device_lock()
+    }
 }

 #[derive(Clone)]
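Note: the new trait method sits alongside the existing synchronous `get_device_lock` that the MemoryStorage and `()` impls delegate to, while the IndexedDb implementation in the next file reads the lock from VSS when a VSS client is configured, so the pre-connect check sees the most recently written lock from any device. A standalone sketch of that split with stand-in types (`LockStorage`, `MemoryStore`, `RemoteBackedStore`, and the `HashMap` "remote" are illustrative, and async is omitted for brevity):

use std::collections::HashMap;

#[derive(Clone, Debug, PartialEq)]
struct DeviceLock {
    time: u32,
    device: String,
}

trait LockStorage {
    // Read whatever copy of the lock is cached locally (cheap).
    fn get_device_lock(&self) -> Option<DeviceLock>;

    // Fetch the most authoritative copy available before doing anything
    // destructive; local-only backends just delegate.
    fn fetch_device_lock(&self) -> Option<DeviceLock> {
        self.get_device_lock()
    }
}

struct MemoryStore {
    lock: Option<DeviceLock>,
}

impl LockStorage for MemoryStore {
    fn get_device_lock(&self) -> Option<DeviceLock> {
        self.lock.clone()
    }
    // fetch_device_lock: the default delegation is enough, matching the
    // MemoryStorage and `()` impls in the diff above.
}

struct RemoteBackedStore {
    local: MemoryStore,
    remote: HashMap<&'static str, DeviceLock>, // stand-in for a remote store
}

impl LockStorage for RemoteBackedStore {
    fn get_device_lock(&self) -> Option<DeviceLock> {
        self.local.get_device_lock()
    }

    fn fetch_device_lock(&self) -> Option<DeviceLock> {
        // When a remote store is configured, read the lock from it rather than
        // the local cache: another device may have taken the lock since this
        // instance last synced.
        self.remote.get("device_lock").cloned()
    }
}

fn main() {
    let remote = HashMap::from([(
        "device_lock",
        DeviceLock { time: 1_698_796_800, device: "other-device".to_string() },
    )]);
    let store = RemoteBackedStore { local: MemoryStore { lock: None }, remote };
    // The local cache is empty, but the remote copy shows another device
    // holding the lock, so the pre-connect check would refuse to proceed.
    assert_eq!(store.get_device_lock(), None);
    assert!(store.fetch_device_lock().is_some());
}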
11 changes: 11 additions & 0 deletions mutiny-wasm/src/indexed_db.rs
@@ -760,6 +760,17 @@ impl MutinyStorage for IndexedDbStorage {

         Ok(())
     }
+
+    async fn fetch_device_lock(&self) -> Result<Option<DeviceLock>, MutinyError> {
+        match self.vss.as_ref() {
+            None => self.get_device_lock(),
+            Some(vss) => {
+                let json = vss.get_object(DEVICE_LOCK_KEY).await?;
+                let device_lock = serde_json::from_value(json.value)?;
+                Ok(Some(device_lock))
+            }
+        }
+    }
 }

 #[cfg(test)]
