//! On-disk storage mapping for torrent data: maps the torrent's contiguous
//! byte space onto its individual files, and buffers partially downloaded
//! pieces in pooled memory until they are complete and can be flushed.

use lazy_static::lazy_static;
use remem::{ItemGuard, Pool};
use std::cmp::min;
use std::collections::{BTreeMap, HashMap, HashSet};
use std::fmt::{Debug, Formatter};
use std::fs::{DirBuilder, File, OpenOptions};
use std::io;
use std::io::{Read, Seek, SeekFrom, Write};
use std::path::{Path, PathBuf, MAIN_SEPARATOR};
use std::rc::Rc;
use std::sync::Mutex;
use torment_core::metainfo::{MetaInfoObject, Torrent};
use torment_core::REQUEST_SIZE;
lazy_static! {
// Pool of reusable 4 MiB (4194304-byte) piece buffers so each in-flight piece
// does not allocate its own large Vec.
// NOTE(review): assumes piece_length never exceeds 4 MiB — TODO confirm.
static ref MEMORY_POOL: Pool<Vec<u8>> = Pool::new(|| vec![0u8; 4194304]);
}
/// Maps a torrent's contiguous byte space onto files on disk and buffers
/// in-progress pieces in memory until they are complete.
#[derive(Debug)]
pub struct StorageMap {
    /// Root directory all mapped file paths are joined onto.
    base_path: PathBuf,
    /// Length in bytes of every piece except possibly the last.
    piece_length: usize,
    /// Number of REQUEST_SIZE blocks ("bits") in a full-length piece.
    bits_in_piece: usize,
    /// Total number of pieces in the torrent.
    pieces: usize,
    /// Open file handles, keyed by the file's start offset in the byte space.
    open_files: HashMap<usize, Rc<Mutex<File>>>,
    /// Partially received pieces, keyed by piece index.
    buffer: HashMap<usize, StoragePiece>,
    /// Files keyed by their inclusive last byte offset, so `range(offset..)`
    /// yields the first file overlapping `offset`.
    mapping: BTreeMap<usize, StorageMapping>,
    /// Total size in bytes of all mapped files.
    size: usize,
}
/// A piece being assembled in memory from individual block ("bit") writes.
pub struct StoragePiece {
    /// Piece index within the torrent.
    index: usize,
    /// Total number of blocks this piece is divided into.
    bits: usize,
    /// Byte offsets of the blocks received so far; the piece is complete
    /// when `ranges.len() == bits`.
    ranges: HashSet<usize>,
    /// Backing buffer leased from MEMORY_POOL holding the piece's bytes.
    buffer: ItemGuard<'static, Vec<u8>>,
}
impl StoragePiece {
    /// True once every block of the piece has been received.
    fn is_complete(&self) -> bool {
        self.ranges.len() == self.bits
    }
}
impl Debug for StoragePiece {
    /// Debug output omits the (large) buffer and the `bits` count.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let mut state = f.debug_struct("StoragePiece");
        state.field("index", &self.index);
        state.field("ranges", &self.ranges);
        state.finish()
    }
}
impl StorageMap {
fn get_offsets(&self, offset: usize, length: usize) -> Vec<usize> {
let end = offset + length;
let mut range = self.mapping.range(offset..);
let mut items = vec![];
while let Some((item_end, _)) = range.next() {
items.push(*item_end);
if *item_end > end {
break;
}
}
items
}
fn get_file(&mut self, item: &StorageMapping) -> io::Result<Rc<Mutex<File>>> {
if let Some(file) = self.open_files.get(&item.offset) {
return Ok(Rc::clone(file));
}
let path = item.path.join(&format!("{}", MAIN_SEPARATOR));
let file_path = self.base_path.join(path);
println!("Opening file: {:?}", file_path);
if let Some(parent) = file_path.parent() {
DirBuilder::new().recursive(true).create(parent)?;
}
let file = OpenOptions::new()
.write(true)
.read(true)
.create(true)
.open(file_path.as_path())?;
let rc = Rc::new(Mutex::new(file));
self.open_files.insert(item.offset, Rc::clone(&rc));
Ok(rc)
}
fn internal_write<T: AsRef<[u8]>>(&mut self, offset: usize, data: T) -> io::Result<()> {
let data = data.as_ref();
let mut written = 0;
for item_end in self.get_offsets(offset, data.len()) {
let item = self.mapping.get(&item_end).unwrap().clone();
let file = self.get_file(&item)?;
let mut file = file.lock().unwrap();
let curr_offset = (offset + written) - item.offset;
file.seek(SeekFrom::Start(curr_offset as u64))?;
let to_write = min(item.length - curr_offset, data.len() - written);
file.write_all(&data[written..written + to_write])?;
written += to_write;
}
assert_eq!(written, data.len(), "Failed to write all data");
Ok(())
}
fn internal_read<T: AsMut<[u8]>>(&mut self, offset: usize, mut data: T) -> io::Result<()> {
let data = data.as_mut();
let mut readed = 0;
for item_end in self.get_offsets(offset, data.len()) {
let item = self.mapping.get(&item_end).unwrap().clone();
let file = self.get_file(&item)?;
let mut file = file.lock().unwrap();
let curr_offset = (offset + readed) - item.offset;
file.seek(SeekFrom::Start(curr_offset as u64))?;
let to_read = min(item.length - curr_offset, data.len() - readed);
file.read_exact(&mut data[readed..readed + to_read])?;
readed += to_read;
}
assert_eq!(readed, data.len(), "Failed to read all data");
Ok(())
}
pub fn has_piece(&self, index: usize) -> bool {
if let Some(piece) = self.buffer.get(&index) {
piece.is_complete()
} else {
false
}
}
pub fn read_piece(&mut self, index: usize) -> io::Result<Vec<u8>> {
let mut bytes = vec![0; self.get_piece_length(index)];
self.read(index, 0, &mut bytes)?;
Ok(bytes)
}
pub fn read<T: AsMut<[u8]>>(
&mut self,
index: usize,
offset: usize,
mut data: T,
) -> io::Result<()> {
let data = data.as_mut();
self.internal_read((index * self.piece_length) + offset, data)
}
pub fn write<T: AsRef<[u8]>>(
&mut self,
index: usize,
offset: usize,
data: T,
) -> io::Result<bool> {
let data = data.as_ref();
let item = if let Some(item) = self.buffer.get_mut(&index) {
item
} else {
let bits = self.get_bits_in_pieces(index);
self.buffer.insert(index, {
StoragePiece {
index,
ranges: Default::default(),
buffer: MEMORY_POOL.get(),
bits,
}
});
self.buffer.get_mut(&index).unwrap()
};
item.buffer[offset..offset + data.len()].copy_from_slice(data);
item.ranges.insert(offset);
if item.is_complete() {
if let Some(item) = self.buffer.remove(&index) {
self.internal_write(
item.index * self.piece_length,
&item.buffer[0..self.get_piece_length(item.index)],
)?;
}
Ok(true)
} else {
Ok(false)
}
}
pub fn wipe_piece(&mut self, index: usize) {
self.buffer.remove(&index);
}
pub fn get_piece_length(&self, index: usize) -> usize {
if index + 1 < self.pieces {
return self.piece_length;
}
let len = self.size % self.piece_length;
if len == 0 {
self.piece_length
} else {
len
}
}
pub fn get_bits_in_pieces(&self, index: usize) -> usize {
if index + 1 < self.pieces {
return self.bits_in_piece;
}
let piece_length = self.get_piece_length(index);
(piece_length / REQUEST_SIZE)
+ if piece_length % REQUEST_SIZE > 0 {
1
} else {
0
}
}
pub fn get_bit_length(&self, index: usize, offset: usize) -> u32 {
let piece_length = self.get_piece_length(index);
if offset + REQUEST_SIZE <= piece_length {
return REQUEST_SIZE as u32;
}
(piece_length % REQUEST_SIZE) as u32
}
pub fn has_piece_bit(&self, index: usize, offset: usize) -> bool {
self.buffer
.get(&index)
.map(|item| item.ranges.contains(&offset))
.unwrap_or(false)
}
pub fn house_keeping(&mut self) {
// self.open_files.clean()
}
pub fn size(&self) -> usize {
self.size
}
}
/// Conversion of torrent metadata into an on-disk StorageMap.
pub trait ToStorageMap {
    /// Builds a StorageMap rooted at `path`. When `is_base_path` is true,
    /// `path` is the download directory and the torrent's own name is used
    /// inside it; otherwise `path` already names the target file
    /// (single-file torrent) or folder (multi-file torrent) itself.
    fn to_storage_map<P: AsRef<Path>>(&self, path: P, is_base_path: bool) -> StorageMap;
}
impl ToStorageMap for Torrent {
    /// Builds the StorageMap for this torrent. See the trait docs for the
    /// meaning of `is_base_path`.
    fn to_storage_map<P: AsRef<Path>>(&self, path: P, is_base_path: bool) -> StorageMap {
        let name = self.name().clone();
        match self.meta_info().object() {
            MetaInfoObject::File(size) => {
                if is_base_path {
                    // `path` is the download directory: the single file is
                    // stored under the torrent's name inside it.
                    StorageMapBuilder::create(path, self.meta_info().piece_length())
                        .insert(vec![name.to_string()], *size)
                        .build()
                } else {
                    // `path` names the target file itself: use its parent as
                    // the base directory and its file name as the stored name.
                    let path = path.as_ref().to_path_buf();
                    // Fall back to the filesystem root when the path has no
                    // parent. BUG FIX: this previously tested
                    // `cfg!(target_os = "win")` — the actual target_os value
                    // is "windows", so the Windows branch could never be
                    // selected.
                    let parent = path.parent().unwrap_or(if cfg!(target_os = "windows") {
                        Path::new("C:\\")
                    } else {
                        Path::new("/")
                    });
                    StorageMapBuilder::create(parent, self.meta_info().piece_length())
                        .insert(
                            vec![path
                                .file_name()
                                .map(|os_str| os_str.to_string_lossy().to_string())
                                .unwrap_or_else(|| self.name().to_string())],
                            *size,
                        )
                        .build()
                }
            }
            MetaInfoObject::Files(files) => {
                // Multi-file torrents always live inside a folder; when only
                // the base path was given, that folder is the torrent's name.
                let mut builder = if is_base_path {
                    StorageMapBuilder::create(path, self.meta_info().piece_length())
                } else {
                    StorageMapBuilder::create(
                        path.as_ref().join(self.name()),
                        self.meta_info().piece_length(),
                    )
                };
                for file in files {
                    builder = builder.insert(file.path().to_vec(), file.length());
                }
                builder.build()
            }
        }
    }
}
/// One file's slice of the torrent's contiguous byte space.
#[derive(Debug, Clone)]
pub struct StorageMapping {
    /// Path components relative to the base path (joined with the platform
    /// separator when the file is opened).
    path: Vec<String>,
    /// Length of the file in bytes.
    length: usize,
    /// Absolute start offset of the file within the torrent's byte space.
    offset: usize,
}
/// Builder assembling a StorageMap from an ordered list of files.
pub struct StorageMapBuilder {
    /// Root directory the resulting map will resolve paths against.
    base_path: PathBuf,
    /// Running total of bytes inserted so far; doubles as the next file's
    /// start offset and, at build time, the torrent's total size.
    offset: usize,
    /// Piece length the torrent was created with.
    piece_length: usize,
    /// Files keyed by their inclusive last byte offset.
    items: BTreeMap<usize, StorageMapping>,
}
impl StorageMapBuilder {
    /// Starts an empty builder rooted at `path` for a torrent with the given
    /// piece length.
    pub fn create<P: AsRef<Path>>(path: P, piece_length: usize) -> StorageMapBuilder {
        StorageMapBuilder {
            base_path: path.as_ref().to_path_buf(),
            offset: 0,
            piece_length,
            items: BTreeMap::new(),
        }
    }

    /// Appends a file of `length` bytes at the current end of the byte space.
    /// Files must be inserted in torrent order.
    pub fn insert(mut self, path: Vec<String>, length: usize) -> Self {
        // BUG FIX: a zero-length file occupies no bytes, but the old code
        // still keyed it at `self.offset - 1` — underflowing (panicking) when
        // it was the first file and otherwise colliding with the previous
        // file's key, corrupting the mapping. Zero-length files are legal in
        // torrents; skip them.
        if length == 0 {
            return self;
        }
        let offset = self.offset;
        self.offset += length;
        // Key by the file's inclusive last byte so `range(offset..)` finds
        // the first file overlapping any given offset.
        self.items.insert(
            self.offset - 1,
            StorageMapping {
                offset,
                length,
                path,
            },
        );
        self
    }

    /// Finalizes the map, deriving the piece count (rounding the last,
    /// possibly partial, piece up) and the blocks-per-piece count.
    pub fn build(self) -> StorageMap {
        let pieces = (self.offset / self.piece_length)
            + if self.offset % self.piece_length > 0 {
                1
            } else {
                0
            };
        let bits_in_piece = (self.piece_length / REQUEST_SIZE)
            + if (self.piece_length % REQUEST_SIZE) > 0 {
                1
            } else {
                0
            };
        StorageMap {
            base_path: self.base_path,
            piece_length: self.piece_length,
            pieces,
            bits_in_piece,
            open_files: Default::default(),
            buffer: Default::default(),
            mapping: self.items,
            size: self.offset,
        }
    }
}