diff --git a/src/archive.rs b/src/archive.rs
index e7cd793..158d380 100644
--- a/src/archive.rs
+++ b/src/archive.rs
@@ -4,9 +4,9 @@
 use std::cmp::Ordering;
 use std::collections::HashMap;
 use std::fs::File;
-use std::io::{self, Read, Result, Write};
+use std::io::{self, Read, Result, Seek, Write};
 use std::mem;
-use std::path::{Path, PathBuf};
+use std::path::Path;
 
 use crate::fnv1a64_hash_string;
 use crate::io::*;
@@ -15,23 +15,22 @@ use crate::kraken::*;
 use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
 
 #[derive(Debug, Clone, Default)]
-pub(crate) struct Archive {
-    pub(crate) header: Header,
-    pub(crate) index: Index,
+pub struct Archive {
+    pub header: Header,
+    pub index: Index,
 
     // custom
-    pub(crate) file_names: HashMap<u64, String>,
+    pub file_names: HashMap<u64, String>,
 }
 
 impl Archive {
     // Function to read a Header from a file
-    pub fn from_file<P>(file_path: &P) -> Result<Archive>
+    pub fn from_file<P>(file_path: P) -> Result<Archive>
     where
         P: AsRef<Path>,
    {
         let mut file = File::open(file_path)?;
         let mut buffer = Vec::with_capacity(mem::size_of::<Header>());
-
         file.read_to_end(&mut buffer)?;
 
         // Ensure that the buffer has enough bytes to represent a Header
@@ -43,14 +42,22 @@ impl Archive {
         }
 
         let mut cursor = io::Cursor::new(&buffer);
-        let header = Header::from_reader(&mut cursor)?;
+
+        Archive::from_reader(&mut cursor)
+    }
+
+    pub fn from_reader<R>(cursor: &mut R) -> Result<Archive>
+    where
+        R: Read + Seek,
+    {
+        let header = Header::from_reader(cursor)?;
 
         // read custom data
         let mut file_names: HashMap<u64, String> = HashMap::default();
         if let Ok(custom_data_length) = cursor.read_u32::<LittleEndian>() {
             if custom_data_length > 0 {
-                cursor.set_position(Header::HEADER_EXTENDED_SIZE);
-                if let Ok(footer) = LxrsFooter::from_reader(&mut cursor) {
+                cursor.seek(io::SeekFrom::Start(Header::HEADER_EXTENDED_SIZE))?;
+                if let Ok(footer) = LxrsFooter::from_reader(cursor) {
                     // add files to hashmap
                     for f in footer.files {
                         let hash = fnv1a64_hash_string(&f);
@@ -61,8 +68,8 @@ impl Archive {
         }
 
         // move to offset Header.IndexPosition
-        cursor.set_position(header.index_position);
-        let index = Index::from_reader(&mut cursor)?;
+        cursor.seek(io::SeekFrom::Start(header.index_position))?;
+        let index = Index::from_reader(cursor)?;
 
         Ok(Archive {
             header,
@@ -72,7 +79,7 @@ impl Archive {
     }
 
     // get filehashes
-    pub(crate) fn get_file_hashes(&self) -> Vec<u64> {
+    pub fn get_file_hashes(&self) -> Vec<u64> {
         self.index
             .file_entries
             .iter()
@@ -82,7 +89,7 @@ impl Archive {
 }
 
 #[derive(Debug, Clone, Copy)]
-pub(crate) struct Header {
+pub struct Header {
     pub magic: u32,
     pub version: u32,
     pub index_position: u64,
@@ -376,21 +383,21 @@ mod integration_tests {
     #[test]
     fn read_archive() {
         let archive_path = PathBuf::from("tests").join("test1.archive");
-        let result = Archive::from_file(&archive_path);
+        let result = Archive::from_file(archive_path);
         assert!(result.is_ok());
     }
 
     #[test]
     fn read_archive2() {
         let archive_path = PathBuf::from("tests").join("nci.archive");
-        let result = Archive::from_file(&archive_path);
+        let result = Archive::from_file(archive_path);
         assert!(result.is_ok());
     }
 
     #[test]
     fn read_custom_data() {
         let archive_path = PathBuf::from("tests").join("test1.archive");
-        let archive = Archive::from_file(&archive_path).expect("Could not parse archive");
+        let archive = Archive::from_file(archive_path).expect("Could not parse archive");
         let mut file_names = archive
             .file_names
             .values()
diff --git a/src/archive_file.rs b/src/archive_file.rs
index 3d46e0d..2cf67d3 100644
--- a/src/archive_file.rs
+++ b/src/archive_file.rs
@@ -15,7 +15,7 @@ use std::{
     collections::{HashMap, HashSet},
     fs::{create_dir_all, File},
     io::{self, BufWriter, Read, Result, Seek, SeekFrom, Write},
-    path::{Path, PathBuf},
+    path::Path,
 };
 
 use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
@@ -32,7 +32,7 @@ use crate::{
 
 // public static void CreateFromDirectory (string sourceDirectoryName, System.IO.Stream destination);
 
-/// Creates a zip archive in the specified stream that contains the files and directories from the specified directory.
+/// Creates an archive in the specified stream that contains the files and directories from the specified directory.
 ///
 /// # Errors
 ///
@@ -57,7 +57,7 @@ where
 
 // public static void CreateFromDirectory (string sourceDirectoryName, string destinationArchiveFileName);
 
-/// Creates a zip archive that contains the files and directories from the specified directory.
+/// Creates an archive that contains the files and directories from the specified directory.
 ///
 /// # Errors
 ///
@@ -82,20 +82,28 @@ where
 
 // public static void ExtractToDirectory (System.IO.Stream source, string destinationDirectoryName, bool overwriteFiles);
 
-/// Extracts all the files from the zip archive stored in the specified stream and places them in the specified destination directory on the file system, and optionally allows choosing if the files in the destination directory should be overwritten.
+/// Extracts all the files from the archive stored in the specified stream and places them in the specified destination directory on the file system, and optionally allows choosing if the files in the destination directory should be overwritten.
 ///
 /// # Errors
 ///
 /// This function will return an error if any io fails.
 pub fn extract_to_directory<P, R>(
-    source: R,
-    destination_directory_name: P,
+    source: &mut R,
+    destination_directory_name: &P,
     overwrite_files: bool,
+    hash_map: Option<HashMap<u64, String>>,
 ) -> io::Result<()>
 where
     P: AsRef<Path>,
-    R: Read,
+    R: Read + Seek,
 {
+    let map = if let Some(hash_map) = hash_map {
+        hash_map
+    } else {
+        get_red4_hashes()
+    };
+
+    extract_archive(source, destination_directory_name, overwrite_files, &map)
 }
 
 // public static void ExtractToDirectory (string sourceArchiveFileName, string destinationDirectoryName, bool overwriteFiles);
@@ -106,17 +114,28 @@ where
 ///
 /// This function will return an error if any io fails.
 pub fn extract_to_directory_path<P>(
-    source_archive_file_name: P,
-    destination_directory_name: P,
+    source_archive_file_name: &P,
+    destination_directory_name: &P,
     overwrite_files: bool,
+    hash_map: Option<HashMap<u64, String>>,
 ) -> io::Result<()>
 where
     P: AsRef<Path>,
 {
+    let map = if let Some(hash_map) = hash_map {
+        hash_map
+    } else {
+        get_red4_hashes()
+    };
+
+    let archive_file = File::open(source_archive_file_name)?;
+    let mut archive_reader = io::BufReader::new(archive_file);
+
     extract_archive(
-        source_archive_file_name,
+        &mut archive_reader,
         destination_directory_name,
         overwrite_files,
+        &map,
     )
 }
 
@@ -126,9 +145,14 @@ pub enum ArchiveMode {
     Update,
 }
 
+/*
+TODO We don't support different modes for now
+needs a wrapper class for archives
+
+
 // public static System.IO.Compression.ZipArchive Open (string archiveFileName, System.IO.Compression.ZipArchiveMode mode);
 
-/// Opens a zip archive at the specified path and in the specified mode.
+/// Opens an archive at the specified path and in the specified mode.
 ///
 /// # Errors
 ///
@@ -140,9 +164,11 @@ where
     todo!()
 }
 
+*/
+
 // public static System.IO.Compression.ZipArchive OpenRead (string archiveFileName);
 
-/// Opens a zip archive for reading at the specified path.
+/// Opens an archive for reading at the specified path.
 ///
 /// # Errors
 ///
@@ -151,7 +177,7 @@ pub fn open_read<P>(archive_file_name: P) -> io::Result<Archive>
 where
     P: AsRef<Path>,
 {
-    todo!()
+    Archive::from_file(archive_file_name)
 }
 
 /////////////////////////////////////////////////////////////////////////////////////////
@@ -167,15 +193,18 @@ where
 /// # Errors
 ///
 /// This function will return an error if any parsing fails
-fn extract_archive<P>(in_file: &P, out_dir: &P, hash_map: &HashMap<u64, String>) -> io::Result<()>
+fn extract_archive<P, R>(
+    archive_reader: &mut R,
+    out_dir: &P,
+    overwrite_files: bool,
+    hash_map: &HashMap<u64, String>,
+) -> io::Result<()>
 where
     P: AsRef<Path>,
+    R: Read + Seek,
 {
     // parse archive headers
-    let archive = Archive::from_file(in_file)?;
-
-    let archive_file = File::open(in_file)?;
-    let mut archive_reader = io::BufReader::new(archive_file);
+    let archive = Archive::from_reader(archive_reader)?;
 
     for (hash, file_entry) in archive.index.file_entries.iter() {
         // get filename
@@ -188,12 +217,21 @@ where
         }
 
         // name or hash is a relative path
-        let mut outfile: PathBuf;
-        let outfile = out_dir.join(name_or_hash);
+        let outfile = out_dir.as_ref().join(name_or_hash);
         create_dir_all(outfile.parent().expect("Could not create an out_dir"))?;
 
         // extract to stream
-        let mut fs = File::create(outfile)?;
+        let mut fs = if overwrite_files {
+            File::create(outfile)?
+        } else {
+            File::options()
+                .read(true)
+                .write(true)
+                .create_new(true)
+                .open(outfile)?
+        };
+
+        //let mut fs = File::create(outfile)?;
         let mut file_writer = BufWriter::new(&mut fs);
         // decompress main file
         let start_index = file_entry.segments_start;
@@ -209,7 +247,7 @@ where
                 archive_reader.read_exact(&mut buffer[..])?;
                 file_writer.write_all(&buffer)?;
             } else {
-                decompress_segment(&mut archive_reader, segment, &mut file_writer)?;
+                decompress_segment(archive_reader, segment, &mut file_writer)?;
             }
         }
 
diff --git a/tests/functional_tests.rs b/tests/functional_tests.rs
index 57c38c4..260db33 100644
--- a/tests/functional_tests.rs
+++ b/tests/functional_tests.rs
@@ -5,10 +5,12 @@
 #[cfg(test)]
 mod tests {
     use std::fs::create_dir_all;
+    use std::path::Path;
     //use std::path::Path;
     use std::time::Instant;
     use std::{fs, path::PathBuf};
 
+    use red4lib::archive_file::extract_to_directory_path;
     use red4lib::*;
 
     #[test]
@@ -21,7 +23,6 @@ mod tests {
         println!("Execution time csv: {:?}", duration);
     }
 
-    /*
     #[test]
     fn test_extract_archive() {
         let archive_path = PathBuf::from("tests").join("test1.archive");
@@ -34,7 +35,7 @@ mod tests {
             assert!(fs::remove_dir_all(&dst_path).is_ok());
         }
 
-        let result = extract_archive(&archive_path, &dst_path, &hashes);
+        let result = extract_to_directory_path(&archive_path, &dst_path, true, Some(hashes));
         assert!(result.is_ok());
 
         // check
@@ -94,7 +95,6 @@ mod tests {
             assert!(fs::remove_dir_all(&dst_path).is_ok());
         }
     }
-    */
 
     #[test]
     fn test_pack_archive() {
@@ -129,7 +129,6 @@ mod tests {
     // HELPERS
     /////////////////////////////////////////////////////////////////////////////////////////
 
-    /*
    fn assert_binary_equality(e: &PathBuf, f: &PathBuf) {
         // compare bytes
         let mut fe = fs::File::open(e).expect("Could not open file");
@@ -171,5 +170,4 @@ mod tests {
         // Return an empty vector if there's an error
         Vec::new()
     }
-    */
 }
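For reference, a minimal sketch of how the new Archive::from_reader entry point introduced above could be driven from any Read + Seek source. The import path red4lib::archive::Archive and the archive path are assumptions for illustration, not taken from this patch; the calls themselves (from_reader, header.index_position, get_file_hashes) match the signatures shown in the diff.

use std::fs::File;
use std::io::{self, BufReader};

// Assumed re-export path; the patch only shows src/archive.rs, not lib.rs.
use red4lib::archive::Archive;

fn main() -> io::Result<()> {
    // Any reader that is Read + Seek works now, not just a file path:
    // a BufReader<File> here, but an in-memory Cursor<Vec<u8>> behaves the same.
    let file = File::open("tests/test1.archive")?;
    let mut reader = BufReader::new(file);

    let archive = Archive::from_reader(&mut reader)?;
    println!("index position: {}", archive.header.index_position);
    println!("file entries:   {}", archive.get_file_hashes().len());
    Ok(())
}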
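Likewise, a hedged sketch of the extraction entry point with its new optional hash-map parameter, mirroring the call used in test_extract_archive. get_red4_hashes is assumed to be a crate-root export (the functional tests reach it via red4lib::*) returning a HashMap<u64, String>; the paths are placeholders.

use std::path::PathBuf;

use red4lib::archive_file::extract_to_directory_path;
use red4lib::get_red4_hashes; // assumed crate-root export, as used via red4lib::* in the tests

fn main() -> std::io::Result<()> {
    let archive_path = PathBuf::from("tests").join("test1.archive");
    let out_dir = PathBuf::from("tests").join("out");

    // Passing Some(map) lets callers reuse a preloaded hash map across many archives;
    // passing None makes the function fall back to get_red4_hashes() internally.
    let hashes = get_red4_hashes();
    extract_to_directory_path(&archive_path, &out_dir, true, Some(hashes))?;

    Ok(())
}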