Skip to content

Commit

Permalink
fix: clippy and fmt
Browse files Browse the repository at this point in the history
  • Loading branch information
dirvine committed Nov 14, 2024
1 parent 8974026 commit 4bfc8dc
Show file tree
Hide file tree
Showing 3 changed files with 64 additions and 43 deletions.
11 changes: 5 additions & 6 deletions src/decrypt.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,13 +13,13 @@ use xor_name::XorName;

/// Decrypts a sequence of encrypted chunks back into the original plaintext.
///
/// The chunks must be supplied in their original order. Each chunk is
/// decrypted independently via `decrypt_chunk` (using the source hashes to
/// derive its pad/key/IV) and the plaintext pieces are concatenated so that
/// chunk boundaries are preserved exactly.
pub fn decrypt(src_hashes: Vec<XorName>, encrypted_chunks: &[&EncryptedChunk]) -> Result<Bytes> {
    // Accumulate decrypted output chunk by chunk; any decryption failure
    // aborts immediately via `?`.
    let mut plaintext = Vec::new();
    for (index, chunk) in encrypted_chunks.iter().enumerate() {
        plaintext.extend_from_slice(&decrypt_chunk(index, &chunk.content, &src_hashes)?);
    }
    Ok(Bytes::from(plaintext))
}

Expand All @@ -32,18 +32,17 @@ pub(crate) fn decrypt_chunk(
) -> Result<Bytes> {
let pki = get_pad_key_and_iv(chunk_index, src_hashes);
let (pad, key, iv) = pki;

// First remove the XOR obfuscation
let xored = xor(content, &pad);

// Then decrypt the content
let decrypted = encryption::decrypt(xored, &key, &iv)?;

// Finally decompress
let mut decompressed = Vec::new();
let mut cursor = Cursor::new(&decrypted);
let _size = brotli::BrotliDecompress(&mut cursor, &mut decompressed)
.map_err(|_| Error::Compression)?;
brotli::BrotliDecompress(&mut cursor, &mut decompressed).map_err(|_| Error::Compression)?;

Ok(Bytes::from(decompressed))
}
48 changes: 29 additions & 19 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -282,7 +282,7 @@ impl StreamSelfDecryptor {
pub fn decrypt_to_file(file_path: PathBuf, data_map: &DataMap) -> Result<Self> {
let temp_dir = tempdir()?;
let src_hashes = extract_hashes(data_map);

// Create mapping of indices to expected chunk hashes
let chunk_hash_map = data_map
.infos()
Expand All @@ -307,17 +307,19 @@ impl StreamSelfDecryptor {
/// Return true if all encrypted chunk got received and file decrypted.
pub fn next_encrypted(&mut self, encrypted_chunk: EncryptedChunk) -> Result<bool> {
let chunk_hash = XorName::from_content(&encrypted_chunk.content);

// Find the index for this chunk based on its hash
let chunk_index = self.chunk_hash_map
let chunk_index = self
.chunk_hash_map
.iter()
.find(|(_, &hash)| hash == chunk_hash)
.map(|(&idx, _)| idx);

if let Some(idx) = chunk_index {
if idx == self.chunk_index {
// Process this chunk immediately
let decrypted_content = decrypt_chunk(idx, &encrypted_chunk.content, &self.src_hashes)?;
let decrypted_content =
decrypt_chunk(idx, &encrypted_chunk.content, &self.src_hashes)?;
self.append_to_file(&decrypted_content)?;
self.chunk_index += 1;
self.drain_unprocessed()?;
Expand All @@ -328,7 +330,9 @@ impl StreamSelfDecryptor {
let file_path = self.temp_dir.path().join(hex::encode(chunk_hash));
let mut output_file = File::create(file_path)?;
output_file.write_all(&encrypted_chunk.content)?;
let _ = self.encrypted_chunks.insert(chunk_hash, encrypted_chunk.content);
let _ = self
.encrypted_chunks
.insert(chunk_hash, encrypted_chunk.content);
}
}

Expand All @@ -353,7 +357,8 @@ impl StreamSelfDecryptor {
fn drain_unprocessed(&mut self) -> Result<()> {
while let Some(&next_hash) = self.chunk_hash_map.get(&self.chunk_index) {
if let Some(content) = self.encrypted_chunks.remove(&next_hash) {
let decrypted_content = decrypt_chunk(self.chunk_index, &content, &self.src_hashes)?;
let decrypted_content =
decrypt_chunk(self.chunk_index, &content, &self.src_hashes)?;
self.append_to_file(&decrypted_content)?;
self.chunk_index += 1;
} else {
Expand Down Expand Up @@ -440,13 +445,19 @@ pub fn decrypt_full_set(data_map: &DataMap, chunks: &[EncryptedChunk]) -> Result
.iter()
.map(|info| (info.dst_hash, info.index))
.collect();

let mut sorted_chunks = Vec::with_capacity(chunks.len());
sorted_chunks.extend(chunks.iter().map(|c| {
let hash = XorName::from_content(&c.content);
(chunk_indices[&hash], c)
}).sorted_by_key(|(i, _)| *i).map(|(_, c)| c));

sorted_chunks.extend(
chunks
.iter()
.map(|c| {
let hash = XorName::from_content(&c.content);
(chunk_indices[&hash], c)
})
.sorted_by_key(|(i, _)| *i)
.map(|(_, c)| c),
);

decrypt::decrypt(src_hashes, &sorted_chunks)
}

Expand All @@ -458,7 +469,7 @@ pub fn decrypt_range(
len: usize,
) -> Result<Bytes> {
let src_hashes = extract_hashes(data_map);

// Create a mapping of chunk hashes to their indices
let chunk_indices: BTreeMap<XorName, usize> = data_map
.infos()
Expand All @@ -468,7 +479,7 @@ pub fn decrypt_range(

// Get chunk size info
let file_size = data_map.file_size();

// Calculate which chunks we need based on the range
let start_chunk = get_chunk_index(file_size, relative_pos);
let end_pos = std::cmp::min(relative_pos + len, file_size);
Expand All @@ -477,11 +488,10 @@ pub fn decrypt_range(
// Sort and filter chunks to only include the ones we need
let sorted_chunks: Vec<_> = chunks
.iter()
.map(|c| {
.filter_map(|c| {
let hash = XorName::from_content(&c.content);
chunk_indices.get(&hash).map(|&idx| (idx, c))
})
.filter_map(|x| x)
.filter(|(idx, _)| *idx >= start_chunk && *idx <= end_chunk)
.sorted_by_key(|(idx, _)| *idx)
.map(|(_, c)| c)
Expand All @@ -506,13 +516,13 @@ pub fn decrypt_range(
let decrypted = decrypt_chunk(chunk_idx, &chunk.content, &src_hashes)?;
all_bytes.extend_from_slice(&decrypted);
}

let bytes = Bytes::from(all_bytes);

// Calculate the actual offset within our decrypted data
let chunk_start_pos = get_start_position(file_size, start_chunk);
let internal_offset = relative_pos - chunk_start_pos;

if internal_offset >= bytes.len() {
return Ok(Bytes::new());
}
Expand Down
48 changes: 30 additions & 18 deletions src/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,8 @@
// permissions and limitations relating to use of the SAFE Network Software.

use crate::{
decrypt_full_set, decrypt_range, encrypt, get_chunk_size, get_num_chunks,
seek_info, test_helpers::random_bytes, DataMap, EncryptedChunk, Error, StreamSelfDecryptor,
decrypt_full_set, decrypt_range, encrypt, get_chunk_size, get_num_chunks, seek_info,
test_helpers::random_bytes, DataMap, EncryptedChunk, Error, StreamSelfDecryptor,
StreamSelfEncryptor, MIN_ENCRYPTABLE_BYTES,
};
use bytes::Bytes;
Expand Down Expand Up @@ -242,32 +242,40 @@ fn seek_and_join() -> Result<(), Error> {
// Create a file that's exactly 3 chunks in size
let file_size = 3 * MIN_ENCRYPTABLE_BYTES;
let original_data = random_bytes(file_size);

// Encrypt the data into chunks
let (data_map, encrypted_chunks) = encrypt_chunks(original_data.clone())?;

// Get the size of each chunk
let chunk_size = get_chunk_size(file_size, 0);

// Read the first two chunks (0 and 1)
let first_chunk = decrypt_range(&data_map, &encrypted_chunks, 0, chunk_size)?;
let second_chunk = decrypt_range(&data_map, &encrypted_chunks, chunk_size, chunk_size)?;

// Verify each chunk size
assert_eq!(first_chunk.len(), chunk_size, "First chunk has incorrect size");
assert_eq!(second_chunk.len(), chunk_size, "Second chunk has incorrect size");

assert_eq!(
first_chunk.len(),
chunk_size,
"First chunk has incorrect size"
);
assert_eq!(
second_chunk.len(),
chunk_size,
"Second chunk has incorrect size"
);

// Join the chunks
let mut combined = Vec::with_capacity(2 * chunk_size);
combined.extend_from_slice(&first_chunk);
combined.extend_from_slice(&second_chunk);
let combined = Bytes::from(combined);

// Verify against original data
let expected = original_data.slice(0..2 * chunk_size);
assert_eq!(combined.len(), expected.len(), "Combined length mismatch");
compare(expected, combined)?;

Ok(())
}

Expand All @@ -276,35 +284,39 @@ fn seek_with_length_over_data_size() -> Result<(), Error> {
let file_size = 10_000_000;
let bytes = random_bytes(file_size);
let start_pos = 512;

// Calculate length safely
let remaining_bytes = file_size.saturating_sub(start_pos);
let len = remaining_bytes.saturating_add(1); // Try to read one more byte than available

let (data_map, encrypted_chunks) = encrypt_chunks(bytes.clone())?;

// We expect to get data from start_pos to end of file
let expected_data = bytes.slice(start_pos..file_size);

let read_data = decrypt_range(&data_map, &encrypted_chunks, start_pos, len)?;
compare(expected_data, read_data)?;

// Also verify reading beyond end returns empty
let read_data = decrypt_range(&data_map, &encrypted_chunks, file_size + 1, 1)?;
assert!(read_data.is_empty(), "Reading beyond end should return empty");
assert!(
read_data.is_empty(),
"Reading beyond end should return empty"
);

Ok(())
}

#[test]
fn seek_over_chunk_limit() -> Result<(), Error> {
let start_size = 4_194_300;
for i in 0..5 { // Reduced iterations
for i in 0..5 {
// Reduced iterations
let file_size = start_size + i;
let bytes = random_bytes(file_size);
let pos = file_size / 4;
let len = std::cmp::min(file_size / 2, file_size - pos); // Ensure we don't read past end
let len = std::cmp::min(file_size / 2, file_size - pos); // Ensure we don't read past end

let expected_data = bytes.slice(pos..(pos + len));
let (data_map, encrypted_chunks) = encrypt_chunks(bytes.clone())?;

Expand Down

0 comments on commit 4bfc8dc

Please sign in to comment.