Fixing CI/CD pipeline

This commit is contained in:
2025-12-11 12:43:13 +01:00
parent d001809a11
commit 0a82ea23d2
24 changed files with 333 additions and 237 deletions

View File

@@ -74,9 +74,20 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
compression,
password,
} => {
create_archive(output, files, max_attempts, encryption, compression, password)?;
create_archive(
output,
files,
max_attempts,
encryption,
compression,
password,
)?;
}
Commands::Extract { archive, output, password } => {
Commands::Extract {
archive,
output,
password,
} => {
extract_archive(archive, output, password)?;
}
Commands::List { archive, password } => {
@@ -129,10 +140,14 @@ fn create_archive(
// Add files
let pb = indicatif::ProgressBar::new(files.len() as u64);
pb.set_style(indicatif::ProgressStyle::default_bar()
.template("{spinner:.green} [{elapsed_precise}] [{bar:40.cyan/blue}] {pos}/{len} ({eta})")
.unwrap()
.progress_chars("#>-"));
pb.set_style(
indicatif::ProgressStyle::default_bar()
.template(
"{spinner:.green} [{elapsed_precise}] [{bar:40.cyan/blue}] {pos}/{len} ({eta})",
)
.unwrap()
.progress_chars("#>-"),
);
for file in &files {
if !file.exists() {
@@ -140,10 +155,11 @@ fn create_archive(
continue;
}
let archive_path = file.file_name()
.map(|n| PathBuf::from(n))
let archive_path = file
.file_name()
.map(PathBuf::from)
.unwrap_or_else(|| file.clone());
writer.add_file(file, archive_path)?;
pb.inc(1);
}
@@ -155,7 +171,7 @@ fn create_archive(
} else {
let pwd = rpassword::prompt_password("Enter password: ")?;
let password_confirm = rpassword::prompt_password("Confirm password: ")?;
if pwd != password_confirm {
return Err("Passwords do not match".into());
}
@@ -164,13 +180,17 @@ fn create_archive(
println!("Writing archive...");
let spinner = indicatif::ProgressBar::new_spinner();
spinner.set_style(indicatif::ProgressStyle::default_spinner().template("{spinner:.blue} {msg}").unwrap());
spinner.set_style(
indicatif::ProgressStyle::default_spinner()
.template("{spinner:.blue} {msg}")
.unwrap(),
);
spinner.enable_steady_tick(std::time::Duration::from_millis(100));
spinner.set_message("Encrypting and saving...");
writer.write_to_file(&output, password.as_bytes())?;
spinner.finish_and_clear();
println!("Archive created successfully: {:?}", output);
Ok(())
}
@@ -181,7 +201,7 @@ fn extract_archive(
password: Option<String>,
) -> Result<(), Box<dyn std::error::Error>> {
let mut reader = ArchiveReader::open(&archive)?;
// Show archive info
let info = reader.get_info();
println!("Remaining attempts: {}", info.remaining_attempts);
@@ -190,30 +210,33 @@ fn extract_archive(
}
// Get password
let password = password.unwrap_or_else(|| {
rpassword::prompt_password("Enter password: ").unwrap_or_default()
});
let password = password
.unwrap_or_else(|| rpassword::prompt_password("Enter password: ").unwrap_or_default());
let spinner = indicatif::ProgressBar::new_spinner();
spinner.set_style(indicatif::ProgressStyle::default_spinner().template("{spinner:.blue} {msg}").unwrap());
spinner.set_style(
indicatif::ProgressStyle::default_spinner()
.template("{spinner:.blue} {msg}")
.unwrap(),
);
spinner.enable_steady_tick(std::time::Duration::from_millis(100));
spinner.set_message("Verifying and unlocking...");
reader.unlock(password.as_bytes())?;
spinner.finish_with_message("Unlocked");
// Create output directory
std::fs::create_dir_all(&output)?;
// List and extract files
let files = reader.list_files()?;
let pb = indicatif::ProgressBar::new(files.len() as u64);
pb.set_style(indicatif::ProgressStyle::default_bar()
.template("{spinner:.green} [{elapsed_precise}] [{bar:40.cyan/blue}] {pos}/{len} ({eta}) {msg}")
.unwrap()
.progress_chars("#>-"));
for file in &files {
pb.set_message(format!("{:?}", file.file_name().unwrap_or_default()));
let output_path = output.join(file);
@@ -224,42 +247,43 @@ fn extract_archive(
pb.inc(1);
}
pb.finish_with_message("Extraction complete!");
Ok(())
}
fn list_archive(archive: PathBuf, password: Option<String>) -> Result<(), Box<dyn std::error::Error>> {
fn list_archive(
archive: PathBuf,
password: Option<String>,
) -> Result<(), Box<dyn std::error::Error>> {
let mut reader = ArchiveReader::open(&archive)?;
let password = password.unwrap_or_else(|| {
rpassword::prompt_password("Enter password: ").unwrap_or_default()
});
let password = password
.unwrap_or_else(|| rpassword::prompt_password("Enter password: ").unwrap_or_default());
reader.unlock(password.as_bytes())?;
let files = reader.list_files()?;
println!("Files in archive:");
for file in &files {
println!(" {:?}", file);
}
Ok(())
}
/// Print the security metadata of an archive without unlocking it.
fn info_archive(archive: PathBuf) -> Result<(), Box<dyn std::error::Error>> {
    let reader = ArchiveReader::open(&archive)?;
    let details = reader.get_info();

    println!("Archive Information:");
    println!("  Max attempts: {}", details.max_attempts);
    println!("  Current attempts: {}", details.current_attempts);
    println!("  Remaining attempts: {}", details.remaining_attempts);
    println!("  Destroyed: {}", details.destroyed);
    println!("  File count: {}", details.file_count);

    // Nudge the user before the archive self-destructs.
    let low_on_attempts = details.remaining_attempts <= 2;
    if low_on_attempts {
        eprintln!("Warning: Low remaining attempts!");
    }

    Ok(())
}

View File

@@ -65,4 +65,3 @@ pub enum SecureArcError {
#[error("Archive is empty")]
EmptyArchive,
}

View File

@@ -5,6 +5,5 @@ pub mod reader;
pub mod writer;
pub use error::SecureArcError;
pub use reader::{ArchiveReader, ArchiveInfo};
pub use writer::{ArchiveWriter, ArchiveConfig};
pub use reader::{ArchiveInfo, ArchiveReader};
pub use writer::{ArchiveConfig, ArchiveWriter};

View File

@@ -1,13 +1,13 @@
//! Archive reader for reading and extracting SecureArc files
use crate::compression::decompress_data;
use crate::format::CompressionAlgorithm;
use crate::crypto::encryption::{decrypt_data, EncryptionKey};
use crate::crypto::integrity::IntegrityKey;
use crate::crypto::kdf::{derive_key, KdfParams};
use crate::format::directory::CentralDirectory;
use crate::format::header::SecurityHeader;
use crate::format::keyslot::KeySlot;
use crate::format::CompressionAlgorithm;
use crate::format::{CompressionAlgorithm as FormatCompression, EncryptionAlgorithm, MAGIC_NUMBER};
use crate::self_destruct::counter::AttemptCounter;
use crate::self_destruct::destruction::SelfDestruct;
@@ -32,7 +32,10 @@ pub struct ArchiveReader {
impl ArchiveReader {
/// Open an archive file for reading
pub fn open<P: AsRef<Path>>(archive_path: P) -> Result<Self, SecureArcError> {
let file = OpenOptions::new().read(true).write(true).open(archive_path)?;
let file = OpenOptions::new()
.read(true)
.write(true)
.open(archive_path)?;
let mut reader = BufReader::new(file);
// Read and verify magic number
@@ -64,45 +67,45 @@ impl ArchiveReader {
let header_offset = 8; // After magic number
let header_total_size = 4 + header_data.len() as u64; // +4 for size prefix
let key_slots_offset = header_offset + header_total_size;
// Read key slots count and data
let mut key_slots = Vec::new();
let mut slot_count_bytes = [0u8; 4];
reader.read_exact(&mut slot_count_bytes)?;
let slot_count = u32::from_le_bytes(slot_count_bytes) as usize;
let mut current_offset = key_slots_offset + 4; // +4 for slot count
for _ in 0..slot_count {
// Read slot size
let mut slot_size_bytes = [0u8; 4];
reader.read_exact(&mut slot_size_bytes)?;
let slot_size = u32::from_le_bytes(slot_size_bytes) as usize;
// Read slot data
let mut slot_data = vec![0u8; slot_size];
reader.read_exact(&mut slot_data)?;
// Deserialize slot
let slot = bincode::deserialize(&slot_data).map_err(|e| {
SecureArcError::KeySlotError(format!("Failed to deserialize key slot: {}", e))
})?;
key_slots.push(slot);
current_offset += 4 + slot_size as u64; // +4 for size prefix
}
let directory_offset = current_offset;
reader.seek(SeekFrom::Start(directory_offset))?;
// Read directory size and encrypted directory
let mut dir_size_bytes = [0u8; 8];
reader.read_exact(&mut dir_size_bytes)?;
let dir_size = u64::from_le_bytes(dir_size_bytes) as usize;
let mut encrypted_directory = vec![0u8; dir_size];
reader.read_exact(&mut encrypted_directory)?;
let payload_offset = directory_offset + 8 + dir_size as u64 + 8;
// Extract algorithms before moving header
let encryption_algorithm = header.encryption_algorithm;
let compression_algorithm = header.compression_algorithm;
@@ -143,21 +146,20 @@ impl ArchiveReader {
},
)?)?;
// Verify header checksum
let counter = AttemptCounter::new(integrity_key.clone());
if counter.verify_checksum(&self.header).is_err() {
// Checksum verification failed - increment counter
counter.increment(&mut self.header)?;
// Check if we should destroy
if counter.should_destroy(&self.header) {
SelfDestruct::execute_destruction(&mut self.header, &mut self.key_slots)?;
self.update_file()?;
return Err(SecureArcError::MaxAttemptsExceeded);
}
self.update_file()?;
return Err(SecureArcError::InvalidPassword);
}
@@ -179,22 +181,18 @@ impl ArchiveReader {
&encryption_key,
self.encryption_algorithm,
)
.map_err(|_e| {
SecureArcError::InvalidPassword
})?;
.map_err(|_e| SecureArcError::InvalidPassword)?;
if master_key_data.len() != 32 {
// Wrong password - increment counter
counter.increment(&mut self.header)?;
if counter.should_destroy(&self.header) {
SelfDestruct::execute_destruction(&mut self.header, &mut self.key_slots)?;
self.update_file()?;
return Err(SecureArcError::MaxAttemptsExceeded);
}
self.update_file()?;
return Err(SecureArcError::InvalidPassword);
}
@@ -208,19 +206,19 @@ impl ArchiveReader {
let master_encryption_key = EncryptionKey::from_bytes(&master_key)?;
let current_pos = self.file.stream_position()?;
self.file.seek(SeekFrom::Start(self.directory_offset))?;
let mut dir_size_bytes = [0u8; 8];
self.file.read_exact(&mut dir_size_bytes)?;
let dir_size = u64::from_le_bytes(dir_size_bytes) as usize;
let mut encrypted_directory = vec![0u8; dir_size];
self.file.read_exact(&mut encrypted_directory)?;
let directory_data = decrypt_data(
&encrypted_directory,
&master_encryption_key,
self.encryption_algorithm,
)?;
self.directory = bincode::deserialize(&directory_data).map_err(|e| {
SecureArcError::FormatError(format!("Failed to deserialize directory: {}", e))
})?;
@@ -236,7 +234,12 @@ impl ArchiveReader {
"Archive must be unlocked first".to_string(),
));
}
Ok(self.directory.entries().iter().map(|e| e.path.clone()).collect())
Ok(self
.directory
.entries()
.iter()
.map(|e| e.path.clone())
.collect())
}
/// Extract a file from the archive
@@ -256,8 +259,9 @@ impl ArchiveReader {
.ok_or_else(|| SecureArcError::FileNotFound(archive_path.display().to_string()))?;
// Read encrypted file data
self.file.seek(SeekFrom::Start(self.payload_offset + entry.data_offset))?;
self.file
.seek(SeekFrom::Start(self.payload_offset + entry.data_offset))?;
// Read encrypted data using stored encrypted_size
let mut encrypted_data = vec![0u8; entry.encrypted_size as usize];
self.file.read_exact(&mut encrypted_data)?;
@@ -265,11 +269,8 @@ impl ArchiveReader {
// Decrypt data
let encryption_key = EncryptionKey::from_bytes(&master_key)?;
let compressed_data = decrypt_data(&encrypted_data, &encryption_key, self.encryption_algorithm)
.map_err(|e| {
e
})?;
let compressed_data =
decrypt_data(&encrypted_data, &encryption_key, self.encryption_algorithm)?;
// Decompress data
let compression_algo = match self.compression_algorithm {
@@ -290,7 +291,10 @@ impl ArchiveReader {
ArchiveInfo {
max_attempts: self.header.max_attempts,
current_attempts: self.header.attempt_counter,
remaining_attempts: self.header.max_attempts.saturating_sub(self.header.attempt_counter),
remaining_attempts: self
.header
.max_attempts
.saturating_sub(self.header.attempt_counter),
destroyed: self.header.destroyed,
file_count: self.directory.entries().len(),
}
@@ -299,38 +303,43 @@ impl ArchiveReader {
/// Update the archive file with current header and key slots
///
/// Persists the (possibly mutated) security header and key slots back into
/// the already-open archive file, then restores the original stream
/// position so in-progress reads are unaffected.
///
/// NOTE(review): this span contained diff residue — each length-prefix
/// `write_all` appeared twice (old one-line form plus reformatted form),
/// which as written would emit every prefix twice. Consolidated to a single
/// write per prefix.
fn update_file(&mut self) -> Result<(), SecureArcError> {
    let current_pos = self.file.stream_position()?;

    // Write header
    self.file.seek(SeekFrom::Start(8))?; // Skip magic number
    let header_data = bincode::serialize(&self.header)
        .map_err(|e| SecureArcError::FormatError(format!("Failed to serialize header: {}", e)))?;

    // Write header size and data
    self.file
        .get_mut()
        .write_all(&(header_data.len() as u32).to_le_bytes())?;
    self.file.get_mut().write_all(&header_data)?;

    // Write key slots
    // Note: we assume key slots count hasn't changed, only content (e.g. zeroization)
    let key_slots_count_offset = 8 + 4 + header_data.len() as u64; // magic + header_size + header
    self.file.seek(SeekFrom::Start(key_slots_count_offset))?;
    self.file
        .get_mut()
        .write_all(&(self.key_slots.len() as u32).to_le_bytes())?;
    for slot in &self.key_slots {
        let slot_data = bincode::serialize(slot).map_err(|e| {
            SecureArcError::KeySlotError(format!("Failed to serialize key slot: {}", e))
        })?;
        self.file
            .get_mut()
            .write_all(&(slot_data.len() as u32).to_le_bytes())?;
        self.file.get_mut().write_all(&slot_data)?;
    }

    self.file.get_mut().flush()?;
    self.file.seek(SeekFrom::Start(current_pos))?;
    Ok(())
}
}
/// Archive information
@@ -342,4 +351,3 @@ pub struct ArchiveInfo {
pub destroyed: bool,
pub file_count: usize,
}

View File

@@ -1,13 +1,13 @@
//! Archive writer for creating SecureArc files
use crate::compression::compress_data;
use crate::format::CompressionAlgorithm;
use crate::crypto::encryption::{encrypt_data, generate_master_key, EncryptionKey};
use crate::crypto::integrity::{compute_checksum, IntegrityKey};
use crate::crypto::kdf::{derive_key, KdfParams};
use crate::format::directory::{CentralDirectory, FileEntry};
use crate::format::header::SecurityHeader;
use crate::format::keyslot::{KeySlot, MASTER_KEY_SIZE};
use crate::format::CompressionAlgorithm;
use crate::format::{CompressionAlgorithm as FormatCompression, EncryptionAlgorithm, MAGIC_NUMBER};
use crate::SecureArcError;
use std::fs::File;
@@ -51,7 +51,7 @@ impl ArchiveWriter {
/// Create a new archive writer with the given configuration
pub fn new(config: ArchiveConfig) -> Self {
let master_key = generate_master_key();
ArchiveWriter {
config,
master_key,
@@ -68,7 +68,7 @@ impl ArchiveWriter {
) -> Result<(), SecureArcError> {
let path = file_path.as_ref();
let file_data = std::fs::read(path)?;
// Get file metadata
let metadata = std::fs::metadata(path)?;
let modified_time = metadata
@@ -151,7 +151,7 @@ impl ArchiveWriter {
// Create key slots (encrypt master key with derived key)
let mut key_slots = Vec::new();
let encryption_key = EncryptionKey::from_bytes(&derived_key)?;
// Primary key slot
let master_key_encrypted = encrypt_data(
&self.master_key,
@@ -168,7 +168,6 @@ impl ArchiveWriter {
header.checksum = compute_checksum(&header_data, &integrity_key);
// Write header (with size prefix)
let header_data = bincode::serialize(&header).map_err(|e| {
SecureArcError::FormatError(format!("Failed to serialize header: {}", e))
@@ -207,7 +206,10 @@ impl ArchiveWriter {
}
/// Serialize header for HMAC computation
fn serialize_header_for_hmac(&self, header: &SecurityHeader) -> Result<Vec<u8>, SecureArcError> {
fn serialize_header_for_hmac(
&self,
header: &SecurityHeader,
) -> Result<Vec<u8>, SecureArcError> {
let mut temp_header = header.clone();
temp_header.checksum = [0u8; 32];
let result = bincode::serialize(&temp_header).map_err(|e| {
@@ -217,4 +219,3 @@ impl ArchiveWriter {
Ok(result)
}
}

View File

@@ -15,22 +15,22 @@ pub enum CompressionError {
}
/// Compress data using the specified algorithm
pub fn compress_data(
data: &[u8],
algorithm: FormatCompression,
) -> Result<Vec<u8>, SecureArcError> {
pub fn compress_data(data: &[u8], algorithm: FormatCompression) -> Result<Vec<u8>, SecureArcError> {
match algorithm {
FormatCompression::None => Ok(data.to_vec()),
FormatCompression::Lzma2 => {
use lzma_rs::lzma_compress;
let mut output = Vec::new();
lzma_compress(&mut std::io::Cursor::new(data), &mut output)
.map_err(|e| SecureArcError::CompressionError(format!("LZMA2 compression failed: {}", e)))?;
lzma_compress(&mut std::io::Cursor::new(data), &mut output).map_err(|e| {
SecureArcError::CompressionError(format!("LZMA2 compression failed: {}", e))
})?;
Ok(output)
}
FormatCompression::Zstd => {
zstd::encode_all(data, 3) // Level 3 compression
.map_err(|e| SecureArcError::CompressionError(format!("Zstd compression failed: {}", e)))
.map_err(|e| {
SecureArcError::CompressionError(format!("Zstd compression failed: {}", e))
})
}
FormatCompression::Brotli => {
let params = BrotliEncoderParams {
@@ -44,9 +44,9 @@ pub fn compress_data(
4096, // Buffer size
&params,
);
writer
.write_all(data)
.map_err(|e| SecureArcError::CompressionError(format!("Brotli compression failed: {}", e)))?;
writer.write_all(data).map_err(|e| {
SecureArcError::CompressionError(format!("Brotli compression failed: {}", e))
})?;
}
Ok(output)
}
@@ -63,19 +63,21 @@ pub fn decompress_data(
FormatCompression::Lzma2 => {
use lzma_rs::lzma_decompress;
let mut output = Vec::new();
lzma_decompress(&mut std::io::Cursor::new(compressed_data), &mut output)
.map_err(|e| SecureArcError::CompressionError(format!("LZMA2 decompression failed: {}", e)))?;
lzma_decompress(&mut std::io::Cursor::new(compressed_data), &mut output).map_err(
|e| SecureArcError::CompressionError(format!("LZMA2 decompression failed: {}", e)),
)?;
Ok(output)
}
FormatCompression::Zstd => {
zstd::decode_all(compressed_data)
.map_err(|e| SecureArcError::CompressionError(format!("Zstd decompression failed: {}", e)))
}
FormatCompression::Zstd => zstd::decode_all(compressed_data).map_err(|e| {
SecureArcError::CompressionError(format!("Zstd decompression failed: {}", e))
}),
FormatCompression::Brotli => {
let mut output = Vec::new();
brotli::Decompressor::new(std::io::Cursor::new(compressed_data), 4096)
.read_to_end(&mut output)
.map_err(|e| SecureArcError::CompressionError(format!("Brotli decompression failed: {}", e)))?;
.map_err(|e| {
SecureArcError::CompressionError(format!("Brotli decompression failed: {}", e))
})?;
Ok(output)
}
}
@@ -109,4 +111,3 @@ mod tests {
assert_eq!(data, decompressed.as_slice());
}
}

View File

@@ -3,4 +3,3 @@
pub mod algorithms;
pub use algorithms::{compress_data, decompress_data, CompressionError};

View File

@@ -68,14 +68,17 @@ pub fn encrypt_data(
let rng = SystemRandom::new();
let mut nonce_bytes = [0u8; AES_NONCE_SIZE];
rng.fill(&mut nonce_bytes)
.map_err(|e| SecureArcError::EncryptionError(format!("Failed to generate nonce: {}", e)))?;
rng.fill(&mut nonce_bytes).map_err(|e| {
SecureArcError::EncryptionError(format!("Failed to generate nonce: {}", e))
})?;
let nonce = Nonce::assume_unique_for_key(nonce_bytes);
let key = LessSafeKey::new(unbound_key);
let mut in_out = data.to_vec();
key.seal_in_place_append_tag(nonce, Aad::empty(), &mut in_out)
.map_err(|e| SecureArcError::EncryptionError(format!("Encryption failed: {}", e)))?;
.map_err(|e| {
SecureArcError::EncryptionError(format!("Encryption failed: {}", e))
})?;
// Prepend nonce
let mut result = nonce_bytes.to_vec();
@@ -83,11 +86,11 @@ pub fn encrypt_data(
Ok(result)
}
EncryptionAlgorithm::ChaCha20Poly1305 => {
let cipher = ChaCha20Poly1305::new(Key::from_slice(key.as_bytes()).into());
let cipher = ChaCha20Poly1305::new(Key::from_slice(key.as_bytes()));
let nonce = ChaCha20Poly1305::generate_nonce(&mut OsRng);
let ciphertext = cipher
.encrypt(&nonce, data)
.map_err(|e| SecureArcError::EncryptionError(format!("Encryption failed: {}", e)))?;
let ciphertext = cipher.encrypt(&nonce, data).map_err(|e| {
SecureArcError::EncryptionError(format!("Encryption failed: {}", e))
})?;
// Prepend nonce
let mut result = nonce.to_vec();
@@ -125,8 +128,11 @@ pub fn decrypt_data(
let key = LessSafeKey::new(unbound_key);
let mut in_out = encrypted_data[AES_NONCE_SIZE..].to_vec();
let plaintext = key.open_in_place(nonce, Aad::empty(), &mut in_out)
.map_err(|e| SecureArcError::EncryptionError(format!("Decryption failed: {}", e)))?;
let plaintext = key
.open_in_place(nonce, Aad::empty(), &mut in_out)
.map_err(|e| {
SecureArcError::EncryptionError(format!("Decryption failed: {}", e))
})?;
// open_in_place returns a slice excluding the tag, convert to Vec
Ok(plaintext.to_vec())
@@ -141,7 +147,7 @@ pub fn decrypt_data(
let nonce = Nonce::from_slice(&encrypted_data[..CHACHA20_NONCE_SIZE]);
let ciphertext = &encrypted_data[CHACHA20_NONCE_SIZE..];
let cipher = ChaCha20Poly1305::new(Key::from_slice(key.as_bytes()).into());
let cipher = ChaCha20Poly1305::new(Key::from_slice(key.as_bytes()));
cipher
.decrypt(nonce, ciphertext)
.map_err(|e| SecureArcError::EncryptionError(format!("Decryption failed: {}", e)))
@@ -159,8 +165,10 @@ mod tests {
let encryption_key = EncryptionKey::from_bytes(&key).unwrap();
let data = b"Hello, SecureArc!";
let encrypted = encrypt_data(data, &encryption_key, EncryptionAlgorithm::Aes256Gcm).unwrap();
let decrypted = decrypt_data(&encrypted, &encryption_key, EncryptionAlgorithm::Aes256Gcm).unwrap();
let encrypted =
encrypt_data(data, &encryption_key, EncryptionAlgorithm::Aes256Gcm).unwrap();
let decrypted =
decrypt_data(&encrypted, &encryption_key, EncryptionAlgorithm::Aes256Gcm).unwrap();
assert_eq!(data, decrypted.as_slice());
}
@@ -171,10 +179,15 @@ mod tests {
let encryption_key = EncryptionKey::from_bytes(&key).unwrap();
let data = b"Hello, SecureArc!";
let encrypted = encrypt_data(data, &encryption_key, EncryptionAlgorithm::ChaCha20Poly1305).unwrap();
let decrypted = decrypt_data(&encrypted, &encryption_key, EncryptionAlgorithm::ChaCha20Poly1305).unwrap();
let encrypted =
encrypt_data(data, &encryption_key, EncryptionAlgorithm::ChaCha20Poly1305).unwrap();
let decrypted = decrypt_data(
&encrypted,
&encryption_key,
EncryptionAlgorithm::ChaCha20Poly1305,
)
.unwrap();
assert_eq!(data, decrypted.as_slice());
}
}

View File

@@ -38,8 +38,8 @@ impl IntegrityKey {
/// Compute HMAC-SHA256 checksum
pub fn compute_checksum(data: &[u8], key: &IntegrityKey) -> [u8; HMAC_SIZE] {
let mut mac = HmacSha256::new_from_slice(key.as_bytes())
.expect("HMAC can take key of any size");
let mut mac =
HmacSha256::new_from_slice(key.as_bytes()).expect("HMAC can take key of any size");
mac.update(data);
let result = mac.finalize();
let mut checksum = [0u8; HMAC_SIZE];
@@ -102,4 +102,3 @@ mod tests {
assert!(verify_checksum(data, &key, &wrong_checksum).is_err());
}
}

View File

@@ -79,7 +79,10 @@ pub fn derive_key(
argon2
.hash_password_into(password, salt, &mut key)
.map_err(|e| {
SecureArcError::KeyDerivationError(format!("Argon2 key derivation failed: {}", e))
SecureArcError::KeyDerivationError(format!(
"Argon2 key derivation failed: {}",
e
))
})?;
}
KdfAlgorithm::Pbkdf2Sha256 => {
@@ -141,4 +144,3 @@ mod tests {
assert_eq!(key1, key2);
}
}

View File

@@ -7,4 +7,3 @@ pub mod kdf;
pub use encryption::{decrypt_data, encrypt_data, generate_master_key, EncryptionKey};
pub use integrity::{compute_checksum, verify_checksum, IntegrityKey};
pub use kdf::{derive_key, KdfParams};

View File

@@ -81,4 +81,3 @@ impl Default for CentralDirectory {
Self::new()
}
}

View File

@@ -1,6 +1,8 @@
//! Security header structure and operations
use crate::format::{CompressionAlgorithm, EncryptionAlgorithm, KdfAlgorithm, MAX_MAX_ATTEMPTS, MIN_MAX_ATTEMPTS};
use crate::format::{
CompressionAlgorithm, EncryptionAlgorithm, KdfAlgorithm, MAX_MAX_ATTEMPTS, MIN_MAX_ATTEMPTS,
};
use crate::SecureArcError;
use rand::RngCore;
use serde::{Deserialize, Serialize};
@@ -42,7 +44,7 @@ pub struct SecurityHeader {
impl SecurityHeader {
/// Create a new security header with default Argon2id parameters
pub fn new(max_attempts: u32) -> Result<Self, SecureArcError> {
if max_attempts < MIN_MAX_ATTEMPTS || max_attempts > MAX_MAX_ATTEMPTS {
if !(MIN_MAX_ATTEMPTS..=MAX_MAX_ATTEMPTS).contains(&max_attempts) {
return Err(SecureArcError::InvalidConfiguration(format!(
"max_attempts must be between {} and {}",
MIN_MAX_ATTEMPTS, MAX_MAX_ATTEMPTS
@@ -88,7 +90,7 @@ impl SecurityHeader {
/// Validate header integrity and parameters
pub fn validate(&self) -> Result<(), SecureArcError> {
if self.max_attempts < MIN_MAX_ATTEMPTS || self.max_attempts > MAX_MAX_ATTEMPTS {
if !(MIN_MAX_ATTEMPTS..=MAX_MAX_ATTEMPTS).contains(&self.max_attempts) {
return Err(SecureArcError::InvalidConfiguration(format!(
"Invalid max_attempts: {} (must be between {} and {})",
self.max_attempts, MIN_MAX_ATTEMPTS, MAX_MAX_ATTEMPTS
@@ -111,4 +113,3 @@ impl SecurityHeader {
self.attempt_counter >= self.max_attempts || self.destroyed
}
}

View File

@@ -64,4 +64,3 @@ impl KeySlot {
Ok(())
}
}

View File

@@ -67,4 +67,3 @@ pub struct SecureArcFile {
/// Central directory
pub directory: directory::CentralDirectory,
}

View File

@@ -9,6 +9,5 @@ pub mod crypto;
pub mod format;
pub mod self_destruct;
pub use archive::{ArchiveReader, ArchiveWriter, SecureArcError, ArchiveInfo};
pub use archive::{ArchiveInfo, ArchiveReader, ArchiveWriter, SecureArcError};
pub use format::{CompressionAlgorithm, EncryptionAlgorithm, KdfAlgorithm};

View File

@@ -56,7 +56,10 @@ impl AttemptCounter {
}
/// Serialize header for HMAC computation (excluding checksum field)
fn serialize_header_for_hmac(&self, header: &SecurityHeader) -> Result<Vec<u8>, SecureArcError> {
fn serialize_header_for_hmac(
&self,
header: &SecurityHeader,
) -> Result<Vec<u8>, SecureArcError> {
use bincode;
// Create a temporary header without checksum for serialization
let mut temp_header = header.clone();
@@ -75,11 +78,7 @@ impl AttemptCounter {
/// Get remaining attempts before destruction
pub fn get_remaining_attempts(&self, header: &SecurityHeader) -> u32 {
if header.attempt_counter >= header.max_attempts {
0
} else {
header.max_attempts - header.attempt_counter
}
header.max_attempts.saturating_sub(header.attempt_counter)
}
/// Check if archive should be destroyed
@@ -126,4 +125,3 @@ mod tests {
assert!(counter.verify_checksum(&header).is_err());
}
}

View File

@@ -3,7 +3,7 @@
use crate::format::header::SecurityHeader;
use crate::format::keyslot::KeySlot;
use crate::SecureArcError;
use rand::{RngCore, thread_rng};
use rand::{thread_rng, RngCore};
/// Self-destruct executor
pub struct SelfDestruct;
@@ -56,10 +56,7 @@ mod tests {
#[test]
fn test_destruction_execution() {
let mut header = SecurityHeader::new(5).unwrap();
let mut key_slots = vec![
KeySlot::new(0),
KeySlot::new(1),
];
let mut key_slots = vec![KeySlot::new(0), KeySlot::new(1)];
// Initialize key slots with dummy data
key_slots[0].encrypted_key = vec![1, 2, 3, 4, 5];
@@ -76,4 +73,3 @@ mod tests {
assert!(header.destroyed);
}
}

View File

@@ -5,4 +5,3 @@ pub mod destruction;
pub use counter::AttemptCounter;
pub use destruction::SelfDestruct;

View File

@@ -1,7 +1,9 @@
//! Cryptographic component tests
use securearc_core::crypto::encryption::{decrypt_data, encrypt_data, generate_master_key, EncryptionKey};
use securearc_core::crypto::integrity::{compute_checksum, IntegrityKey, verify_checksum};
use securearc_core::crypto::encryption::{
decrypt_data, encrypt_data, generate_master_key, EncryptionKey,
};
use securearc_core::crypto::integrity::{compute_checksum, verify_checksum, IntegrityKey};
use securearc_core::crypto::kdf::{derive_key, KdfParams};
use securearc_core::format::{EncryptionAlgorithm, KdfAlgorithm};
@@ -13,12 +15,19 @@ fn test_encryption_round_trip() {
// Test AES-256-GCM
let encrypted = encrypt_data(data, &encryption_key, EncryptionAlgorithm::Aes256Gcm).unwrap();
let decrypted = decrypt_data(&encrypted, &encryption_key, EncryptionAlgorithm::Aes256Gcm).unwrap();
let decrypted =
decrypt_data(&encrypted, &encryption_key, EncryptionAlgorithm::Aes256Gcm).unwrap();
assert_eq!(data, decrypted.as_slice());
// Test ChaCha20-Poly1305
let encrypted = encrypt_data(data, &encryption_key, EncryptionAlgorithm::ChaCha20Poly1305).unwrap();
let decrypted = decrypt_data(&encrypted, &encryption_key, EncryptionAlgorithm::ChaCha20Poly1305).unwrap();
let encrypted =
encrypt_data(data, &encryption_key, EncryptionAlgorithm::ChaCha20Poly1305).unwrap();
let decrypted = decrypt_data(
&encrypted,
&encryption_key,
EncryptionAlgorithm::ChaCha20Poly1305,
)
.unwrap();
assert_eq!(data, decrypted.as_slice());
}
@@ -74,4 +83,3 @@ fn test_pbkdf2_kdf() {
let key2 = derive_key(password, salt, &params).unwrap();
assert_eq!(key1, key2);
}

View File

@@ -1,8 +1,8 @@
//! File format tests
use securearc_core::format::directory::{CentralDirectory, FileEntry};
use securearc_core::format::header::SecurityHeader;
use securearc_core::format::keyslot::KeySlot;
use securearc_core::format::directory::{CentralDirectory, FileEntry};
use std::io::Cursor;
use std::path::PathBuf;
@@ -40,7 +40,7 @@ fn test_key_slot_zeroization() {
#[test]
fn test_directory_operations() {
let mut directory = CentralDirectory::new();
let entry = FileEntry {
path: PathBuf::from("test.txt"),
original_size: 100,
@@ -50,12 +50,11 @@ fn test_directory_operations() {
attributes: 0,
data_offset: 0,
};
directory.add_entry(entry);
assert_eq!(directory.entries().len(), 1);
let found = directory.find_entry(&PathBuf::from("test.txt"));
assert!(found.is_some());
assert_eq!(found.unwrap().original_size, 100);
}

View File

@@ -11,27 +11,29 @@ fn test_create_and_extract_archive() {
let temp_dir = TempDir::new().unwrap();
let archive_path = temp_dir.path().join("test.sarc");
let test_file = temp_dir.path().join("test.txt");
// Create test file
fs::write(&test_file, b"Hello, SecureArc!").unwrap();
// Create archive
let config = ArchiveConfig::default();
let mut writer = ArchiveWriter::new(config);
writer.add_file(&test_file, PathBuf::from("test.txt")).unwrap();
writer
.add_file(&test_file, PathBuf::from("test.txt"))
.unwrap();
writer.write_to_file(&archive_path, b"password123").unwrap();
// Extract archive
let mut reader = ArchiveReader::open(&archive_path).unwrap();
reader.unlock(b"password123").unwrap();
let files = reader.list_files().unwrap();
assert_eq!(files.len(), 1);
assert_eq!(files[0], PathBuf::from("test.txt"));
let output_file = temp_dir.path().join("extracted.txt");
reader.extract_file(&files[0], &output_file).unwrap();
let content = fs::read(&output_file).unwrap();
assert_eq!(content, b"Hello, SecureArc!");
}
@@ -41,32 +43,36 @@ fn test_self_destruct_mechanism() {
let temp_dir = TempDir::new().unwrap();
let archive_path = temp_dir.path().join("test.sarc");
let test_file = temp_dir.path().join("test.txt");
// Create test file
fs::write(&test_file, b"Test data").unwrap();
// Create archive with max_attempts = 3
let mut config = ArchiveConfig::default();
config.max_attempts = 3;
let mut writer = ArchiveWriter::new(config);
writer.add_file(&test_file, PathBuf::from("test.txt")).unwrap();
writer.write_to_file(&archive_path, b"correct_password").unwrap();
writer
.add_file(&test_file, PathBuf::from("test.txt"))
.unwrap();
writer
.write_to_file(&archive_path, b"correct_password")
.unwrap();
// Try wrong passwords
let mut reader = ArchiveReader::open(&archive_path).unwrap();
// First wrong attempt
assert!(reader.unlock(b"wrong1").is_err());
// Second wrong attempt
let mut reader = ArchiveReader::open(&archive_path).unwrap();
assert!(reader.unlock(b"wrong2").is_err());
// Third wrong attempt should destroy archive
let mut reader = ArchiveReader::open(&archive_path).unwrap();
let result = reader.unlock(b"wrong3");
assert!(result.is_err());
// Archive should be destroyed - correct password should fail
// Archive should be destroyed - opening should fail
assert!(ArchiveReader::open(&archive_path).is_err());
@@ -76,24 +82,24 @@ fn test_self_destruct_mechanism() {
fn test_multiple_files() {
    let temp_dir = TempDir::new().unwrap();
    let archive_path = temp_dir.path().join("test.sarc");

    // Prepare a pair of source files to pack.
    let file1 = temp_dir.path().join("file1.txt");
    let file2 = temp_dir.path().join("file2.txt");
    fs::write(&file1, b"File 1 content").unwrap();
    fs::write(&file2, b"File 2 content").unwrap();

    // Build an archive containing both files.
    let mut writer = ArchiveWriter::new(ArchiveConfig::default());
    writer.add_file(&file1, PathBuf::from("file1.txt")).unwrap();
    writer.add_file(&file2, PathBuf::from("file2.txt")).unwrap();
    writer.write_to_file(&archive_path, b"password").unwrap();

    // Re-open, unlock, and confirm both entries are listed.
    let mut reader = ArchiveReader::open(&archive_path).unwrap();
    reader.unlock(b"password").unwrap();
    assert_eq!(reader.list_files().unwrap().len(), 2);
}
@@ -103,26 +109,30 @@ fn test_different_encryption_algorithms() {
let temp_dir = TempDir::new().unwrap();
let test_file = temp_dir.path().join("test.txt");
fs::write(&test_file, b"Test data").unwrap();
// Test AES-256-GCM
let archive_path1 = temp_dir.path().join("test_aes.sarc");
let mut config = ArchiveConfig::default();
config.encryption_algorithm = EncryptionAlgorithm::Aes256Gcm;
let mut writer = ArchiveWriter::new(config);
writer.add_file(&test_file, PathBuf::from("test.txt")).unwrap();
writer
.add_file(&test_file, PathBuf::from("test.txt"))
.unwrap();
writer.write_to_file(&archive_path1, b"password").unwrap();
let mut reader = ArchiveReader::open(&archive_path1).unwrap();
assert!(reader.unlock(b"password").is_ok());
// Test ChaCha20-Poly1305
let archive_path2 = temp_dir.path().join("test_chacha.sarc");
let mut config = ArchiveConfig::default();
config.encryption_algorithm = EncryptionAlgorithm::ChaCha20Poly1305;
let mut writer = ArchiveWriter::new(config);
writer.add_file(&test_file, PathBuf::from("test.txt")).unwrap();
writer
.add_file(&test_file, PathBuf::from("test.txt"))
.unwrap();
writer.write_to_file(&archive_path2, b"password").unwrap();
let mut reader = ArchiveReader::open(&archive_path2).unwrap();
assert!(reader.unlock(b"password").is_ok());
}
@@ -132,7 +142,7 @@ fn test_different_compression_algorithms() {
let temp_dir = TempDir::new().unwrap();
let test_file = temp_dir.path().join("test.txt");
fs::write(&test_file, b"Test data for compression testing").unwrap();
for &algo in &[
CompressionAlgorithm::Lzma2,
CompressionAlgorithm::Zstd,
@@ -142,11 +152,12 @@ fn test_different_compression_algorithms() {
let mut config = ArchiveConfig::default();
config.compression_algorithm = algo;
let mut writer = ArchiveWriter::new(config);
writer.add_file(&test_file, PathBuf::from("test.txt")).unwrap();
writer
.add_file(&test_file, PathBuf::from("test.txt"))
.unwrap();
writer.write_to_file(&archive_path, b"password").unwrap();
let mut reader = ArchiveReader::open(&archive_path).unwrap();
assert!(reader.unlock(b"password").is_ok());
}
}

View File

@@ -1,3 +1,3 @@
fn main() {
tauri_build::build()
tauri_build::build()
}

View File

@@ -57,7 +57,12 @@ fn create_archive(window: tauri::Window, request: CreateArchiveRequest) -> Resul
let encryption_algorithm = match request.encryption.to_lowercase().as_str() {
"aes256" | "aes" => EncryptionAlgorithm::Aes256Gcm,
"chacha20" | "chacha" => EncryptionAlgorithm::ChaCha20Poly1305,
_ => return Err(format!("Unknown encryption algorithm: {}", request.encryption)),
_ => {
return Err(format!(
"Unknown encryption algorithm: {}",
request.encryption
))
}
};
let compression_algorithm = match request.compression.to_lowercase().as_str() {
@@ -65,7 +70,12 @@ fn create_archive(window: tauri::Window, request: CreateArchiveRequest) -> Resul
"zstd" => CompressionAlgorithm::Zstd,
"brotli" => CompressionAlgorithm::Brotli,
"none" => CompressionAlgorithm::None,
_ => return Err(format!("Unknown compression algorithm: {}", request.compression)),
_ => {
return Err(format!(
"Unknown compression algorithm: {}",
request.compression
))
}
};
let config = ArchiveConfig {
@@ -89,45 +99,72 @@ fn create_archive(window: tauri::Window, request: CreateArchiveRequest) -> Resul
let entry = entry.map_err(|e| format!("Failed to read directory entry: {}", e))?;
if entry.file_type().is_file() {
let file_path = entry.path();
// Create archive path relative to the selected folder's parent
// e.g. selecting /foo/bar and archiving /foo/bar/baz.txt -> bar/baz.txt
let parent = path.parent().unwrap_or(&path);
let archive_path = file_path.strip_prefix(parent)
.unwrap_or_else(|_| file_path.file_name().map(|n| Path::new(n)).unwrap_or(file_path))
let archive_path = file_path
.strip_prefix(parent)
.unwrap_or_else(|_| {
file_path
.file_name()
.map(Path::new)
.unwrap_or(file_path)
})
.to_path_buf();
// Emit progress
let _ = window.emit("create-progress", ProgressPayload {
current: i + 1, // Only tracking top level items for now in 'total', imperfect but functional
total: total_files,
filename: file_path.file_name().unwrap_or_default().to_string_lossy().to_string(),
status: "processing".to_string(),
});
// Emit progress
let _ = window.emit(
"create-progress",
ProgressPayload {
current: i + 1, // Only tracking top level items for now in 'total', imperfect but functional
total: total_files,
filename: file_path
.file_name()
.unwrap_or_default()
.to_string_lossy()
.to_string(),
status: "processing".to_string(),
},
);
writer.add_file(file_path, archive_path)
writer
.add_file(file_path, archive_path)
.map_err(|e| format!("Failed to add file: {}", e))?;
}
}
} else {
let archive_path = path.file_name()
.map(|n| PathBuf::from(n))
let archive_path = path
.file_name()
.map(PathBuf::from)
.unwrap_or_else(|| path.clone());
// Emit start progress for this file
let _ = window.emit("create-progress", ProgressPayload {
current: i + 1,
total: total_files,
filename: path.file_name().unwrap_or_default().to_string_lossy().to_string(),
status: "processing".to_string(),
});
writer.add_file(&path, archive_path)
// Emit start progress for this file
let _ = window.emit(
"create-progress",
ProgressPayload {
current: i + 1,
total: total_files,
filename: path
.file_name()
.unwrap_or_default()
.to_string_lossy()
.to_string(),
status: "processing".to_string(),
},
);
writer
.add_file(&path, archive_path)
.map_err(|e| format!("Failed to add file: {}", e))?;
}
}
writer.write_to_file(&PathBuf::from(&request.output_path), request.password.as_bytes())
writer
.write_to_file(
PathBuf::from(&request.output_path),
request.password.as_bytes(),
)
.map_err(|e| format!("Failed to create archive: {}", e))?;
Ok(())
@@ -135,13 +172,15 @@ fn create_archive(window: tauri::Window, request: CreateArchiveRequest) -> Resul
#[tauri::command]
fn extract_archive(window: tauri::Window, request: ExtractArchiveRequest) -> Result<(), String> {
let mut reader = ArchiveReader::open(&PathBuf::from(&request.archive_path))
let mut reader = ArchiveReader::open(PathBuf::from(&request.archive_path))
.map_err(|e| format!("Failed to open archive: {}", e))?;
reader.unlock(request.password.as_bytes())
reader
.unlock(request.password.as_bytes())
.map_err(|e| format!("Failed to unlock archive: {}", e))?;
let files = reader.list_files()
let files = reader
.list_files()
.map_err(|e| format!("Failed to list files: {}", e))?;
std::fs::create_dir_all(&request.output_path)
@@ -157,14 +196,18 @@ fn extract_archive(window: tauri::Window, request: ExtractArchiveRequest) -> Res
}
// Emit progress
let _ = window.emit("extract-progress", ProgressPayload {
current: i + 1,
total: total_files,
filename: file.to_string_lossy().to_string(),
status: "extracting".to_string(),
});
let _ = window.emit(
"extract-progress",
ProgressPayload {
current: i + 1,
total: total_files,
filename: file.to_string_lossy().to_string(),
status: "extracting".to_string(),
},
);
reader.extract_file(file, &output_path)
reader
.extract_file(file, &output_path)
.map_err(|e| format!("Failed to extract file: {}", e))?;
}
@@ -173,14 +216,16 @@ fn extract_archive(window: tauri::Window, request: ExtractArchiveRequest) -> Res
#[tauri::command]
fn list_archive(request: ListArchiveRequest) -> Result<ArchiveInfoResponse, String> {
let mut reader = ArchiveReader::open(&PathBuf::from(&request.archive_path))
let mut reader = ArchiveReader::open(PathBuf::from(&request.archive_path))
.map_err(|e| format!("Failed to open archive: {}", e))?;
reader.unlock(request.password.as_bytes())
reader
.unlock(request.password.as_bytes())
.map_err(|e| format!("Failed to unlock archive: {}", e))?;
let info = reader.get_info();
let files = reader.list_files()
let files = reader
.list_files()
.map_err(|e| format!("Failed to list files: {}", e))?;
Ok(ArchiveInfoResponse {
@@ -195,7 +240,7 @@ fn list_archive(request: ListArchiveRequest) -> Result<ArchiveInfoResponse, Stri
#[tauri::command]
fn get_archive_info(archive_path: String) -> Result<ArchiveInfoResponse, String> {
let reader = ArchiveReader::open(&PathBuf::from(&archive_path))
let reader = ArchiveReader::open(PathBuf::from(&archive_path))
.map_err(|e| format!("Failed to open archive: {}", e))?;
let info = reader.get_info();
@@ -239,4 +284,3 @@ fn main() {
.run(tauri::generate_context!())
.expect("error while running tauri application");
}