Compare commits
No commits in common. "b41547ddcbdcd2b64b668210bb59c1d2e25e5458" and "bbc1970bbebb76a15811964dd023193b3ab238a7" have entirely different histories.
b41547ddcb ... bbc1970bbe

Cargo.toml
@@ -1,8 +1,9 @@
-[workspace]
-resolver = "2"
-
-members = [
-    "btrfs_explorer_bin",
-    "btrfs_parse_derive",
-    "btrfs_explorer",
-]
+[package]
+name = "parsebtrfs"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+binparse_derive = { path = "../binparse_derive" }
+memmap2 = "0.7.1"
+rouille = "3.6.2"
@@ -1,9 +0,0 @@
[package]
name = "btrfs_explorer"
version = "0.1.0"
edition = "2021"

[dependencies]
btrfs_parse_derive = { path = "../btrfs_parse_derive" }
maud = "0.26.0"
rouille = "3.6.2"
@@ -1,369 +0,0 @@
use std::convert::identity;
use std::rc::Rc;
use std::ops::{Deref, RangeBounds, Bound};

use crate::btrfs_structs::{Leaf, Key, Item, InteriorNode, Node, ParseError, ParseBin, Value, Superblock, ItemType, ZERO_KEY, LAST_KEY};
use crate::nodereader::NodeReader;

/// Represents a B-Tree inside a filesystem image. Can be used to look up keys,
/// and handles the tree traversal and the virtual address translation.
pub struct Tree<'a> {
    pub image: &'a [u8],
    pub reader: Rc<NodeReader<'a>>,
    pub root_addr_log: u64,
}

impl<'a> Tree<'a> {
    pub fn new<T: Into<u64>>(image: &'a [u8], tree_id: T) -> Result<Tree<'a>, ParseError> {
        let superblock = Superblock::parse(&image[0x10000..])?;
        let reader = Rc::new(NodeReader::new(image)?);

        let root_tree = Tree {
            image,
            reader: Rc::clone(&reader),
            root_addr_log: superblock.root
        };

        // let tree_root_item = root_tree.find_key(Key::new(tree_id.into(), ItemType::Root, 0))?;
        let tree_id = tree_id.into();
        let root_item_key = Key::new(tree_id, ItemType::Root, 0);
        let tree_root_item = root_tree.range(root_item_key..)
            .next()
            .filter(|x| x.key.key_id == tree_id && x.key.key_type == ItemType::Root);

        let root_addr_log = match tree_root_item {
            Some(Item { key: _, value: Value::Root(root)}) => root.bytenr,
            _ => return Err("root item not found or invalid".into())
        };

        Ok(Tree { image, reader: Rc::clone(&reader), root_addr_log })
    }

    pub fn root(image: &'a [u8]) -> Result<Tree<'a>, ParseError> {
        let reader = Rc::new(NodeReader::new(image)?);
        let superblock = Superblock::parse(&image[0x10000..])?;

        Ok(Tree { image, reader, root_addr_log: superblock.root })
    }

    pub fn chunk(image: &'a [u8]) -> Result<Tree<'a>, ParseError> {
        let reader = Rc::new(NodeReader::new(image)?);
        let superblock = Superblock::parse(&image[0x10000..])?;

        Ok(Tree { image, reader, root_addr_log: superblock.chunk_root })
    }
}

/***** looking up keys *****/

impl Leaf {
    pub fn find_key(&self, key: Key) -> Option<Item> {
        self.items
            .iter()
            .find(|x|x.key == key)
            .map(|x|x.clone())
    }

    pub fn find_key_or_previous(&self, key: Key) -> Option<usize> {
        self.items
            .iter()
            .take_while(|x|x.key <= key)
            .enumerate()
            .last()
            .map(|x|x.0)
    }

}

impl InteriorNode {
    /// Return the index of the last child which has key at most `key`. This is the
    /// branch which contains `key` if it exists. Returns `None` if all children are greater than
    /// `key`, which guarantees that `key` is not among the descendants of `self`.
    pub fn find_key_or_previous(&self, key: Key) -> Option<usize> {
        // if the key is not exactly matched, binary_search returns the next index, but we want the previous one
        match self.children.binary_search_by_key(&key, |x|x.key) {
            Ok(idx) => Some(idx),
            Err(idx) if idx == 0 => None,
            Err(idx) => Some(idx-1),
        }
    }
}


impl Tree<'_> {
    /// Recursively traverse a tree to find a key, given the key and the logical address
    /// of the tree root. Internal function, `Tree::find_key` is the public interface.
    fn find_key_in_node(&self, addr: u64, key: Key) -> Result<Item, ParseError> {
        let node = self.reader.get_node(addr)?;

        match node.deref() {
            Node::Interior(interior_node) => {
                let next_node_index = interior_node.find_key_or_previous(key).unwrap();
                let next_node_log = interior_node.children[next_node_index].ptr;
                self.find_key_in_node(next_node_log, key)
            },
            Node::Leaf(leaf) => {
                leaf.find_key(key).ok_or(
                    error!(
                        "Item with key ({},{:?},{}) was not found in the leaf at logical address 0x{:x}",
                        key.key_id, key.key_type, key.key_offset, addr)
                )
            }
        }
    }

    pub fn find_key(&self, key: Key) -> Result<Item, ParseError> {
        self.find_key_in_node(self.root_addr_log, key)
    }
}

/***** iterator *****/

pub struct RangeIter<'a, 'b> {
    tree: &'b Tree<'a>,

    start: Bound<Key>,
    end: Bound<Key>,
    forward_skip_fn: Box<dyn Fn(Key) -> Key>,
    backward_skip_fn: Box<dyn Fn(Key) -> Key>,
}

impl<'a> Tree<'a> {
    /// Given a tree, a range of indices, and two "skip functions", produces a double
    /// ended iterator which iterates through the keys contained in the range, in ascending
    /// or descending order.
    ///
    /// The skip functions are ignored for now, but are intended as an optimization:
    /// after a key `k` was returned by the iterator (or the reverse iterator), all keys
    /// strictly lower than `forward_skip_fn(k)` are skipped (resp. all keys strictly above
    /// `backward_skip_fn(k)` are skipped).
    ///
    /// If `forward_skip_fn` and `backward_skip_fn` are the identity, nothing is skipped.
    pub fn range_with_skip<'b, R, F1, F2>(&'b self, range: R, forward_skip_fn: F1, backward_skip_fn: F2) -> RangeIter<'a, 'b>
    where
        R: RangeBounds<Key>,
        F1: Fn(Key) -> Key + 'static,
        F2: Fn(Key) -> Key + 'static {
        RangeIter {
            tree: self,
            start: range.start_bound().cloned(),
            end: range.end_bound().cloned(),
            forward_skip_fn: Box::new(forward_skip_fn),
            backward_skip_fn: Box::new(backward_skip_fn),
        }
    }

    pub fn range<'b, R: RangeBounds<Key>>(&'b self, range: R) -> RangeIter<'a, 'b> {
        RangeIter {
            tree: self,
            start: range.start_bound().cloned(),
            end: range.end_bound().cloned(),
            forward_skip_fn: Box::new(identity),
            backward_skip_fn: Box::new(identity),
        }
    }


    pub fn iter<'b>(&'b self) -> RangeIter<'a, 'b> {
        RangeIter {
            tree: self,
            start: Bound::Unbounded,
            end: Bound::Unbounded,
            forward_skip_fn: Box::new(identity),
            backward_skip_fn: Box::new(identity),
        }
    }
}


/// Get the first item under the node at logical address `addr`.
/// This function panics if there are no items.
fn get_first_item(tree: &Tree, addr: u64) -> Result<Item, ParseError> {
    match tree.reader.get_node(addr)?.deref() {
        Node::Interior(intnode) => get_first_item(tree, intnode.children[0].ptr),
        Node::Leaf(leafnode) => Ok(leafnode.items[0].clone()),
    }
}

/// Get the last item under the node at logical address `addr`.
/// This function panics if there are no items.
fn get_last_item(tree: &Tree, addr: u64) -> Result<Item, ParseError> {
    match tree.reader.get_node(addr)?.deref() {
        Node::Interior(intnode) => get_last_item(tree, intnode.children.last().unwrap().ptr),
        Node::Leaf(leafnode) => Ok(leafnode.items.last().unwrap().clone()),
    }
}

#[derive(Debug,PartialEq,Eq,Clone,Copy)]
enum FindKeyMode {LT, GT, GE, LE}

/// Try to find the item with key `key` if it exists in the tree, and return
/// the "closest" match. The exact meaning of "closest" is given by the `mode` argument:
/// if `mode` is `LT`/`GT`/`GE`/`LE`, return the item with the greatest / least / least / greatest
/// key less than / greater than / greater than or equal to / less than or equal to `key`.
fn find_closest_key(tree: &Tree, key: Key, mode: FindKeyMode) -> Result<Option<Item>, ParseError> {

    // in some cases, this task can't be accomplished by a single traversal,
    // but we might have to go back up the tree; prev/next allow us to quickly go back to the right node
    let mut current: u64 = tree.root_addr_log;
    let mut prev: Option<u64> = None;
    let mut next: Option<u64> = None;

    loop {
        let node = tree.reader.get_node(current)?;
        match node.deref() {
            Node::Interior(intnode) => {
                match intnode.find_key_or_previous(key) {
                    Some(idx) => {
                        if let Some(kp) = (idx > 0).then(|| intnode.children.get(idx-1)).flatten() {
                            prev = Some(kp.ptr);
                        }
                        if let Some(kp) = intnode.children.get(idx+1) {
                            next = Some(kp.ptr);
                        }

                        current = intnode.children[idx].ptr;
                    },
                    None => {
                        // this can only happen if every key in the current node is `> key`,
                        // which really should only happen if we're in the root node, as otherwise
                        // we wouldn't have descended into this branch; so assume every key in the
                        // tree is `> key`.
                        if mode == FindKeyMode::LT || mode == FindKeyMode::LE {
                            return Ok(None);
                        } else {
                            // return the first item in the tree; we are an interior node so we really should have
                            // at least one child
                            let addr = intnode.children[0].ptr;
                            return Ok(Some(get_first_item(tree, addr)?));
                        }
                    }
                }
            },
            Node::Leaf(leafnode) => {
                match leafnode.find_key_or_previous(key) {
                    Some(idx) => {
                        // the standard case, we found a key `k` with the guarantee that `k <= key`
                        let Item {key: k, value: v} = leafnode.items[idx].clone();

                        if mode == FindKeyMode::LE || mode == FindKeyMode::LT && k < key || mode == FindKeyMode::GE && k == key {
                            return Ok(Some(Item {key: k, value: v}))
                        } else if mode == FindKeyMode::LT && k == key {
                            // prev
                            if idx > 0 {
                                return Ok(Some(leafnode.items[idx-1].clone()));
                            } else {
                                // use prev
                                if let Some(addr) = prev {
                                    return Ok(Some(get_last_item(tree, addr)?));
                                } else {
                                    return Ok(None);
                                }
                            }
                        } else {
                            // next
                            if let Some(item) = leafnode.items.get(idx+1) {
                                return Ok(Some(item.clone()));
                            } else {
                                // use next
                                if let Some(addr) = next {
                                    return Ok(Some(get_first_item(tree, addr)?));
                                } else {
                                    return Ok(None);
                                }
                            }
                        }
                    },
                    None => {
                        // same as above, but this can only happen if the root node is a leaf
                        if mode == FindKeyMode::LT || mode == FindKeyMode::LE {
                            return Ok(None);
                        } else {
                            // return the first item in the tree if it exists
                            return Ok(leafnode.items.get(0).map(|x|x.clone()));
                        }
                    },
                }
            },
        }
    }
}

fn range_valid<T: Ord>(start: Bound<T>, end: Bound<T>) -> bool {
    match (start, end) {
        (Bound::Included(x), Bound::Included(y)) => x <= y,
        (Bound::Excluded(x), Bound::Included(y)) => x < y,
        (Bound::Included(x), Bound::Excluded(y)) => x < y,
        (Bound::Excluded(x), Bound::Excluded(y)) => x < y, // could technically be empty if "y = x+1", but we can't check
        (_, _) => true, // one of them is unbounded
    }
}

impl<'a, 'b> Iterator for RangeIter<'a, 'b> {
    type Item = Item;

    fn next(&mut self) -> Option<Item> {
        if !range_valid(self.start.as_ref(), self.end.as_ref()) {
            return None;
        }

        let (start_key, mode): (Key, FindKeyMode) = match &self.start {
            &Bound::Included(x) => (x, FindKeyMode::GE),
            &Bound::Excluded(x) => (x, FindKeyMode::GT),
            &Bound::Unbounded => (ZERO_KEY, FindKeyMode::GE),
        };

        // FIX: proper error handling
        let result = find_closest_key(self.tree, start_key, mode)
            .expect("file system should be consistent (or this is a bug)");

        if let Some(item) = &result {
            self.start = Bound::Excluded((self.forward_skip_fn)(item.key));
        }

        let end_filter = |item: &Item| {
            match &self.end {
                &Bound::Included(x) => item.key <= x,
                &Bound::Excluded(x) => item.key < x,
                &Bound::Unbounded => true,
            }
        };

        result
            .filter(end_filter)
            .map(|item|item.clone())
    }
}

impl<'a, 'b> DoubleEndedIterator for RangeIter<'a, 'b> {
    fn next_back(&mut self) -> Option<Item> {
        if !range_valid(self.start.as_ref(), self.end.as_ref()) {
            return None;
        }

        let (start_key, mode): (Key, FindKeyMode) = match &self.end {
            &Bound::Included(x) => (x, FindKeyMode::LE),
            &Bound::Excluded(x) => (x, FindKeyMode::LT),
            &Bound::Unbounded => (LAST_KEY, FindKeyMode::LE),
        };

        let result = find_closest_key(self.tree, start_key, mode)
            .expect("file system should be consistent (or this is a bug)");

        if let Some(item) = &result {
            self.end = Bound::Excluded((self.backward_skip_fn)(item.key));
        }

        let start_filter = |item: &Item| {
            match &self.start {
                &Bound::Included(x) => item.key >= x,
                &Bound::Excluded(x) => item.key > x,
                &Bound::Unbounded => true,
            }
        };

        result
            .filter(start_filter)
            .map(|item|item.clone())
    }
}
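For orientation, a minimal usage sketch of the `Tree` API removed above (assuming the module layout of this diff; the tree id 5 and object id 0x100 are hypothetical examples):

// Sketch only: relies on the crate's own types as shown above.
use crate::btrfs_lookup::Tree;
use crate::btrfs_structs::{Key, ItemType, ParseError};

fn dump_object(image: &[u8]) -> Result<(), ParseError> {
    // Open the filesystem tree (tree id 5) through the root tree.
    let fs_tree = Tree::new(image, 5u64)?;

    // All items of object id 0x100, in ascending key order.
    let start = Key::new(0x100, ItemType::Invalid, 0);
    let end = Key::new(0x101, ItemType::Invalid, 0);
    for item in fs_tree.range(start..end) {
        println!("{:?}", item.key);
    }

    // The iterator is double ended, so the largest key comes just as cheaply.
    let last = fs_tree.iter().next_back();
    println!("last item: {:?}", last.map(|it| it.key));
    Ok(())
}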
@@ -1,136 +0,0 @@
use std::str::FromStr;
use rouille::{Request, Response};
use crate::{
    btrfs_structs::{ItemType, Item, Key, ZERO_KEY, LAST_KEY},
    btrfs_lookup::Tree,
    render_tree::{render_table, TableResult},
    main_error::MainError,
};

enum TreeDisplayMode {
    // (x,y,z): Highlight key_id x, show y keys before (excluding x*), show z keys after (including x*)
    Highlight(u64, usize, usize),
    // (x, y): Show y keys starting at x, including x
    From(Key, usize),
    // (x, y): Show y keys before x, excluding x
    To(Key, usize),
}


fn http_tree_internal(tree: &Tree, tree_id: u64, mode: TreeDisplayMode) -> Response {
    let mut items: Vec<Item>;
    let mut highlighted_key_id: Option<u64> = None;

    match mode {
        TreeDisplayMode::Highlight(key_id, before, after) => {
            let key = Key {key_id, key_type: ItemType::Invalid, key_offset: 0 };
            items = tree.range(..key).rev().take(before).collect();
            items.reverse();
            items.extend(tree.range(key..).take(after));
            highlighted_key_id = Some(key_id);
        },
        TreeDisplayMode::From(key, num_lines) => {
            items = tree.range(key..).take(num_lines).collect();
            if items.len() < num_lines {
                items.reverse();
                items.extend(tree.range(..key).rev().take(num_lines - items.len()));
                items.reverse();
            }
        },
        TreeDisplayMode::To(key, num_lines) => {
            items = tree.range(..key).rev().take(num_lines).collect();
            items.reverse();
            if items.len() < num_lines {
                items.extend(tree.range(key..).take(num_lines - items.len()));
            }
        }
    };

    let table_result = TableResult {
        tree_id,
        tree_desc: root_key_desc(tree_id).map(|x|x.to_string()),
        key_id: highlighted_key_id,
        items: items.iter().map(|it|(it,&[] as &[u8])).collect(),
        first_key: items.first().map(|it|it.key).unwrap_or(LAST_KEY),
        last_key: items.last().map(|it|it.key).unwrap_or(ZERO_KEY),
    };

    Response::html(render_table(table_result))
}

fn root_key_desc(id: u64) -> Option<&'static str> {
    match id {
        1 => Some("root"),
        2 => Some("extent"),
        3 => Some("chunk"),
        4 => Some("device"),
        5 => Some("filesystem"),
        6 => Some("root directory"),
        7 => Some("checksum"),
        8 => Some("quota"),
        9 => Some("UUID"),
        10 => Some("free space"),
        11 => Some("block group"),
        0xffff_ffff_ffff_fff7 => Some("data reloc"),
        _ => None,
    }
}

fn http_tree_parse_parameters(method: Option<&str>, key: Option<&str>) -> Result<TreeDisplayMode, MainError> {
    let result = match key {
        None => TreeDisplayMode::From(ZERO_KEY, 50),
        Some(key) => {
            let components: Vec<&str> = key.split('-').collect();

            match method {
                None => {
                    if components.len() < 1 {
                        return Err(MainError(format!("Invalid key: {key}")))
                    }
                    let key_id = u64::from_str_radix(components[0], 16)?;
                    TreeDisplayMode::Highlight(key_id, 10, 40)
                },
                Some(method) => {
                    if components.len() < 3 {
                        return Err(MainError(format!("Invalid key: {key}")))
                    }

                    let key_id = u64::from_str_radix(components[0], 16)?;
                    let key_type: ItemType = u8::from_str_radix(components[1], 16)?.into();
                    let key_offset = u64::from_str_radix(components[2], 16)?;
                    let key = Key {key_id, key_type, key_offset };

                    if method == "from" {
                        TreeDisplayMode::From(key, 50)
                    } else if method == "to" {
                        TreeDisplayMode::To(key, 50)
                    } else {
                        return Err(MainError(format!("not a valid method: {method}")))
                    }
                }
            }
        }
    };

    Ok(result)
}

pub fn http_tree(image: &[u8], tree_id: &str, method: Option<&str>, key: Option<&str>, _req: &Request) -> Result<Response, MainError> {
    let tree_display_mode = http_tree_parse_parameters(method, key)?;

    let tree_id = u64::from_str(tree_id).unwrap();
    let tree = if tree_id == 1 {
        Tree::root(image).unwrap()
    } else if tree_id == 3 {
        Tree::chunk(image).unwrap()
    } else {
        Tree::new(image, tree_id).unwrap()
    };

    Ok(http_tree_internal(&tree, tree_id, tree_display_mode))
}

pub fn http_root(image: &[u8], _key: Option<&str>, _req: &Request) -> Response {
    let tree = Tree::root(image).unwrap();
    http_tree_internal(&tree, 1, TreeDisplayMode::From(ZERO_KEY, 100))
}
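For orientation, the key parameter accepted above is a dash-separated triple of hex fields, key_id-key_type-key_offset; a hypothetical call inside the same module:

// Sketch only: exercises http_tree_parse_parameters as defined above.
fn parse_examples() -> Result<(), MainError> {
    // Parameters in the style of "/tree/5/from/100-54-0": show 50 items starting at
    // key_id 0x100, key_type 0x54 (Dir), key_offset 0.
    let _from = http_tree_parse_parameters(Some("from"), Some("100-54-0"))?;

    // A bare key with no method highlights object id 0x100 with context around it.
    let _highlight = http_tree_parse_parameters(None, Some("100"))?;
    Ok(())
}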
@@ -1,45 +0,0 @@
pub struct MainError(pub String);

impl std::error::Error for MainError {}

impl std::fmt::Debug for MainError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", &self.0)
    }
}

impl std::fmt::Display for MainError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", &self.0)
    }
}

impl From<String> for MainError {
    fn from(value: String) -> MainError {
        MainError(value)
    }
}

impl From<&str> for MainError {
    fn from(value: &str) -> MainError {
        MainError::from(String::from(value))
    }
}

impl From<crate::btrfs_structs::ParseError> for MainError {
    fn from(value: crate::btrfs_structs::ParseError) -> MainError {
        MainError::from(format!("BTRFS format error: {value}"))
    }
}

impl From<std::io::Error> for MainError {
    fn from(value: std::io::Error) -> MainError {
        MainError::from(format!("IO error: {value}"))
    }
}

impl From<std::num::ParseIntError> for MainError {
    fn from(value: std::num::ParseIntError) -> MainError {
        MainError::from(format!("Not an integer: {value}"))
    }
}
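The point of the From impls above is that `?` can mix error sources inside handlers returning MainError; a small hypothetical illustration:

// Sketch only: assumes the MainError type defined above is in scope.
fn parse_hex_id(s: &str) -> Result<u64, MainError> {
    // std::num::ParseIntError converts automatically via From<ParseIntError>.
    let id = u64::from_str_radix(s, 16)?;
    if id == 0 {
        // &str converts via From<&str>.
        return Err("id must be nonzero".into());
    }
    Ok(id)
}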
@@ -1,45 +0,0 @@
use std::{
    collections::HashMap,
    sync::Arc,
    cell::RefCell,
};

use crate::btrfs_structs::{Node, ParseError, ParseBin};
use crate::addrmap::{LogToPhys, AddressMap};

pub struct NodeReader<'a> {
    image: &'a [u8],
    addr_map: AddressMap,
    cache: RefCell<HashMap<u64, Arc<Node>>>,
}

impl<'a> NodeReader<'a> {
    pub fn new(image: &'a [u8]) -> Result<NodeReader<'a>, ParseError> {
        let addr_map = AddressMap::new(image)?;
        Ok(NodeReader {image, addr_map, cache: RefCell::new(HashMap::new())})
    }

    pub fn with_addrmap(image: &'a [u8], addr_map: AddressMap) -> Result<NodeReader<'a>, ParseError> {
        Ok(NodeReader {image, addr_map, cache: RefCell::new(HashMap::new())})
    }

    /// Read a node given its logical address
    pub fn get_node(&self, addr: u64) -> Result<Arc<Node>, ParseError> {
        if let Some(node) = self.cache.borrow().get(&addr) {
            return Ok(Arc::clone(node))
        }

        println!("Reading node at {:X}", addr);

        let node_data = self.addr_map.node_at_log(self.image, addr)?;
        let node = Arc::new(Node::parse(node_data)?);

        self.cache.borrow_mut().insert(addr, Arc::clone(&node));

        Ok(node)
    }

    pub fn addr_map(&self) -> &AddressMap {
        &self.addr_map
    }
}
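A short sketch of how the cache above behaves (the logical address is hypothetical; the second lookup returns a clone of the cached Arc instead of re-parsing the node):

// Sketch only: assumes this lives next to NodeReader inside the same crate.
fn read_twice(image: &[u8]) -> Result<(), ParseError> {
    let reader = NodeReader::new(image)?;
    let a = reader.get_node(0x40_0000)?; // parsed, then inserted into the RefCell-backed cache
    let b = reader.get_node(0x40_0000)?; // served from the cache
    assert!(std::sync::Arc::ptr_eq(&a, &b));
    Ok(())
}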
@@ -1,38 +0,0 @@
use maud::Render;
use std::fmt::{Debug, UpperHex};

pub struct DebugRender<T>(pub T);

impl<T: Debug> Render for DebugRender<T> {
    fn render_to(&self, w: &mut String) {
        format_args!("{0:#?}", self.0).render_to(w);
    }
}

pub struct Hex<T>(pub T);

impl<T: UpperHex> Render for Hex<T> {
    fn render_to(&self, w: &mut String) {
        format_args!("{0:X}", self.0).render_to(w);
    }
}

pub fn size_name(x: u64) -> String {
    if x == 0 {
        format!("0 B")
    } else if x % (1<<10) != 0 {
        format!("{} B", x)
    } else if x % (1<<20) != 0 {
        format!("{} KiB", x / (1<<10))
    } else if x % (1<<30) != 0 {
        format!("{} MiB", x / (1<<20))
    } else if x % (1<<40) != 0 {
        format!("{} GiB", x / (1<<30))
    } else if x % (1<<50) != 0 {
        format!("{} TiB", x / (1<<40))
    } else if x % (1<<60) != 0 {
        format!("{} PiB", x / (1<<50))
    } else {
        format!("{} EiB", x / (1<<60))
    }
}
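size_name picks the largest power-of-1024 unit that divides the value exactly; a hypothetical test against the function as written above:

// Sketch only: checks the behaviour of size_name.
#[test]
fn size_name_examples() {
    assert_eq!(size_name(0), "0 B");
    assert_eq!(size_name(1500), "1500 B");          // not a multiple of 1 KiB
    assert_eq!(size_name(3 * (1 << 20)), "3 MiB");  // exact multiple of 1 MiB
    assert_eq!(size_name(1 << 30), "1 GiB");
}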
@@ -1,290 +0,0 @@
use crate::btrfs_structs::{Item, Key, ItemType, Value, ExtentDataBody};
use crate::render_common::{Hex, size_name};
use maud::{Markup, html, DOCTYPE, PreEscaped};

#[derive(Debug)]
pub struct TableResult<'a> {
    pub tree_id: u64,
    pub tree_desc: Option<String>,
    pub key_id: Option<u64>,
    pub items: Vec<(&'a Item, &'a [u8])>,
    pub first_key: Key,
    pub last_key: Key,
}

pub fn render_table(table: TableResult) -> Markup {

    let header: String = if let Some(desc) = table.tree_desc {
        format!("Tree {} ({})", table.tree_id, desc)
    } else {
        format!("Tree {}", table.tree_id)
    };

    let key_input_value = table.key_id.map_or(String::new(), |x| format!("{:X}", x));

    let first_key_url = format!("/tree/{}",
        table.tree_id);
    let prev_key_url = format!("/tree/{}/to/{:016X}-{:02X}-{:016X}",
        table.tree_id,
        table.first_key.key_id,
        u8::from(table.first_key.key_type),
        table.first_key.key_offset);
    let next_key_url = format!("/tree/{}/from/{:016X}-{:02X}-{:016X}",
        table.tree_id,
        table.last_key.key_id,
        u8::from(table.last_key.key_type),
        table.last_key.key_offset);
    let last_key_url = format!("/tree/{}/to/{:016X}-{:02X}-{:016X}",
        table.tree_id,
        u64::wrapping_sub(0,1),
        u8::wrapping_sub(0,1),
        u64::wrapping_sub(0,1));

    let mut rows: Vec<Markup> = Vec::new();

    for &(it, _it_data) in table.items.iter() {
        let highlighted = if table.key_id.filter(|x|*x == it.key.key_id).is_some() { "highlight" } else { "" };
        let value_string = item_value_string(table.tree_id, it);
        let details_string = item_details_string(table.tree_id, it);
        let raw_string = format!("{:#?}", &it.value);
        let id_desc = row_id_desc(it.key, table.tree_id);

        rows.push(html! {
            details.item.(highlighted) {
                summary {
                    span.key.key_id.(key_type_class(it.key)) {
                        (id_desc.0)
                    }
                    span.key.key_type.(key_type_class(it.key)) {
                        (id_desc.1)
                    }
                    span.key.key_offset.(key_type_class(it.key)) {
                        (id_desc.2)
                    }
                    span.itemvalue.(key_type_class(it.key)) {
                        (&value_string)
                    }
                }

                div.details {
                    (&details_string)

                    details {
                        summary {
                            "show full value"
                        }
                        pre {
                            (&raw_string)
                        }
                    }
                }
            }
        });
    }

    // the complete page
    html! {
        (DOCTYPE)
        head {
            link rel="stylesheet" href="/style.css";
        }
        body {
            h1 {
                (header)
            }

            @if table.tree_id != 1 {
                a href="/tree/1" {
                    "go back to root tree"
                }
            }

            form method="get" action={"/tree/" (table.tree_id)} {
                input type="text" name="key" value=(key_input_value);
                input type="submit" value="Search";
            }

            a.nav href=(first_key_url) { div.nav { "first" } }
            a.nav href=(prev_key_url) { div.nav { "prev" } }

            @for row in &rows { (row) }

            a.nav href=(next_key_url) { div.nav { "next" } }
            a.nav href=(last_key_url) { div.nav { "last" } }
        }
    }
}

fn key_type_class(key: Key) -> &'static str {
    match key.key_type {
        ItemType::Inode => "inode",
        ItemType::Ref => "ref",
        ItemType::RootRef => "ref",
        ItemType::RootBackRef => "ref",
        ItemType::ExtentData => "extent",
        ItemType::Dir => "dir",
        ItemType::DirIndex => "dir",
        ItemType::Root => "root",
        _ => "",
    }
}

fn row_id_desc(key: Key, tree_id: u64) -> (Markup, Markup, Markup) {
    let x = format!("{:X}", key.key_id);
    let y = format!("{:?} ({:02X})", key.key_type, u8::from(key.key_type));
    let z = if key.key_type == ItemType::RootRef || key.key_type == ItemType::Ref {
        format!("<a href=\"/tree/{}/{:X}\">{:X}</a>", tree_id, key.key_offset, key.key_offset)
    } else {
        format!("{:X}", key.key_offset)
    };
    (PreEscaped(x),PreEscaped(y),PreEscaped(z))
}

fn item_value_string(tree_id: u64, item: &Item) -> Markup {
    match &item.value {
        Value::Root(_) => {
            html! { a href={"/tree/" (item.key.key_id)} { "go to tree " (item.key.key_id) } }
        },
        Value::Dir(dir_item) | Value::DirIndex(dir_item) => {
            let name = format!("{:?}", &dir_item.name);
            let id = dir_item.location.key_id;
            html! {
                (name)
                " @ "
                a href=(format!("/tree/{tree_id}/{id:x}")) {
                    (Hex(id))
                }
            }
        },
        Value::Inode(inode_item) => {
            let file_type = match inode_item.mode / (1<<12) {
                4 => "directory",
                2 => "character device",
                6 => "block device",
                8 => "regular file",
                1 => "FIFO",
                10 => "symbolic link",
                12 => "socket",
                _ => "unknown file type",
            };
            format_escape!("{}, mode {}{}{}{}", file_type,
                (inode_item.mode / (1<<9)) % 8,
                (inode_item.mode / (1<<6)) % 8,
                (inode_item.mode / (1<<3)) % 8,
                (inode_item.mode / (1<<0)) % 8)
        },
        Value::ExtentData(extent_data_item) =>
            match &extent_data_item.data {
                ExtentDataBody::Inline(data) =>
                    PreEscaped(format!("inline, length {}", size_name(data.len() as u64))),
                ExtentDataBody::External(ext_extent) =>
                    PreEscaped(format!("external, length {}", size_name(ext_extent.num_bytes))),
            },
        Value::Ref(ref_item) =>
            html! { (format!("{:?}", &ref_item.name)) },
        Value::RootRef(ref_item) =>
            html! { (format!("{:?}", &ref_item.name)) },
        Value::Extent(extent_item) =>
            PreEscaped(format!("flags: {}, block_refs: {:?}", extent_item.flags, extent_item.block_refs)),
        Value::BlockGroup(blockgroup_item) =>
            PreEscaped(format!("{} used", size_name(blockgroup_item.used))),
        Value::DevExtent(dev_extent_item) =>
            PreEscaped(format!("chunk_tree: {}, chunk_offset: {:x}, length: {}", dev_extent_item.chunk_tree, dev_extent_item.chunk_offset, size_name(dev_extent_item.length))),
        Value::UUIDSubvol(uuid_subvol_item) =>
            PreEscaped(format!("subvolume id: {}", uuid_subvol_item.subvol_id)),
        Value::FreeSpaceInfo(free_space_info) =>
            PreEscaped(format!("extent_count: {}, flags: {}", free_space_info.extent_count, free_space_info.flags)),
        Value::Dev(dev_item) =>
            PreEscaped(format!("total_bytes: {}", size_name(dev_item.total_bytes))),
        Value::Chunk(chunk_item) =>
            PreEscaped(format!("size: {}", size_name(chunk_item.size))),
        _ => {
            // println!("{:?} {:?}", item.key, item.value);
            PreEscaped(String::new())
        },
    }
}

fn item_details_string(_tree_id: u64, item: &Item) -> Markup {
    match &item.value {
        Value::Inode(inode_item) => {
            html! { table { tbody {
                tr { td { "size" } td { (inode_item.size) } }
                tr { td { "mode" } td { (inode_item.mode) } }
                tr { td { "uid" } td { (inode_item.uid) } }
                tr { td { "gid" } td { (inode_item.gid) } }
                tr { td { "nlink" } td { (inode_item.nlink) } }
                tr { td { "atime" } td { (inode_item.atime.sec) } }
                tr { td { "ctime" } td { (inode_item.ctime.sec) } }
                tr { td { "mtime" } td { (inode_item.mtime.sec) } }
                tr { td { "otime" } td { (inode_item.otime.sec) } }
            }}}
        },
        Value::ExtentData(extent_item) => {
            match &extent_item.data {
                ExtentDataBody::Inline(_data) => {
                    html! {} // we really want data as string / hex
                },
                ExtentDataBody::External(ext_extent) => {
                    html! {
                        p {
                            @if ext_extent.disk_bytenr == 0 {
                                (size_name(ext_extent.num_bytes)) " of zeros."
                            } @else {
                                (format!("{} on disk, starting at offset {:X} within the extent at address {:X}; {} in the file starting from offset {:X}.", size_name(ext_extent.disk_num_bytes), ext_extent.offset, ext_extent.disk_bytenr, size_name(ext_extent.num_bytes), item.key.key_offset))
                            }
                        }
                        table { tbody {
                            tr { td { "compression" } td { (extent_item.header.compression) } }
                            tr { td { "encryption" } td { (extent_item.header.encryption) } }
                            tr { td { "other_encoding" } td { (extent_item.header.other_encoding) } }
                        }}
                    }
                },
            }
        },
        Value::Ref(ref_item) => {
            html! { table { tbody {
                tr { td { "name" } td { (format!("{:?}", ref_item.name)) } }
                tr { td { "index" } td { (ref_item.index) } }
            }}}
        },
        Value::Dir(dir_item) | Value::DirIndex(dir_item) => {
            html! { table { tbody {
                tr { td { "name" } td { (format!("{:?}", dir_item.name)) } }
            }}}
        },
        Value::Root(root_item) => {
            html! { table { tbody {
                tr { td { "root dir id" } td { (format!("{:X}", root_item.root_dirid)) } }
                tr { td { "logical address" } td { (format!("{:X}", root_item.bytenr)) } }
                tr { td { "bytes used" } td { (size_name(root_item.bytes_used)) } }
                tr { td { "last snapshot" } td { (root_item.last_snapshot) } }
                tr { td { "flags" } td { (root_item.flags) } }
                tr { td { "refs" } td { (root_item.refs) } }
                tr { td { "level" } td { (root_item.level) } }
                tr { td { "UUID" } td { (format!("{:?}", root_item.uuid)) } }
                tr { td { "parent UUID" } td { (format!("{:?}", root_item.parent_uuid)) } }
                tr { td { "received UUID" } td { (format!("{:?}", root_item.received_uuid)) } }
                tr { td { "ctransid" } td { (root_item.ctransid) } }
                tr { td { "otransid" } td { (root_item.otransid) } }
                tr { td { "stransid" } td { (root_item.stransid) } }
                tr { td { "rtransid" } td { (root_item.rtransid) } }
                tr { td { "ctime" } td { (root_item.ctime.sec) } }
                tr { td { "otime" } td { (root_item.otime.sec) } }
                tr { td { "stime" } td { (root_item.stime.sec) } }
                tr { td { "rtime" } td { (root_item.rtime.sec) } }
            }}}
        },
        Value::RootRef(root_ref_item) => {
            html! { table { tbody {
                tr { td { "name" } td { (format!("{:?}", root_ref_item.name)) } }
                tr { td { "directory" } td { (root_ref_item.directory) } }
                tr { td { "index" } td { (root_ref_item.index) } }
            }}}
        },
        _ => {
            html! {}
        },
    }
}
@@ -1,12 +0,0 @@
[package]
name = "btrfs_explorer_bin"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
btrfs_explorer = { path = "../btrfs_explorer" }
memmap2 = "0.7.1"
maud = "0.26.0"
rouille = "3.6.2"
@@ -1,14 +0,0 @@
[package]
name = "btrfs_parse_derive"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
proc-macro2 = "1.0.66"
quote = "1.0.32"
syn = "2.0.27"

[lib]
proc-macro = true
@@ -1,178 +0,0 @@
use quote::{quote, format_ident};
use proc_macro2::Span;
use proc_macro::TokenStream;
use syn::{DeriveInput, Data::Enum, parse_macro_input};

#[proc_macro_derive(AllVariants)]
pub fn derive_all_variants(input: TokenStream) -> TokenStream {
    let syn_item: DeriveInput = parse_macro_input!(input);

    let variants = match syn_item.data {
        Enum(enum_item) => {
            enum_item.variants.into_iter().map(|v|v.ident)
        },
        _ => panic!("AllVariants only works on enums!"),
    };
    let enum_name = syn_item.ident;

    let expanded = quote! {
        impl #enum_name {
            fn all_variants() -> &'static[#enum_name] {
                &[ #(#enum_name::#variants),* ]
            }
        }
    };

    expanded.into()
}

#[proc_macro_derive(ParseBin, attributes(skip_bytes, len))]
pub fn derive_parse_bin(input: TokenStream) -> TokenStream {
    let syn_item: DeriveInput = parse_macro_input!(input);
    let name = syn_item.ident;

    match syn_item.data {
        syn::Data::Struct(struct_item) => {
            match struct_item.fields {
                syn::Fields::Named(fields_named) => {
                    derive_parse_bin_struct(&name, &fields_named.named)
                },
                syn::Fields::Unnamed(fields_unnamed) => {
                    if fields_unnamed.unnamed.len() != 1 {
                        panic!("ParseBin does not support tuple structs!");
                    }

                    let inner_type = fields_unnamed.unnamed.into_iter().next().unwrap().ty;
                    derive_parse_bin_alias(name, inner_type)
                },
                _ => panic!("ParseBin on unit structs makes no sense!"),
            }
        },
        _ => panic!("ParseBin only works on structs so far!"),
    }
}

fn derive_parse_bin_alias(name: syn::Ident, ty: syn::Type) -> TokenStream {
    quote! {
        impl ParseBin for #name {
            fn parse_len(bytes: &[u8]) -> Result<(Self, usize), ParseError> {
                let (result, size) = <#ty>::parse_len(bytes)?;
                Ok((#name(result), size))
            }
        }
    }.into()
}

fn derive_parse_bin_struct<'a, T>(name: &syn::Ident, fields: T) -> TokenStream
    where T: IntoIterator<Item = &'a syn::Field>
{
    let mut parsing_statements = Vec::new();
    let mut combining_expressions = Vec::new();

    for field in fields {
        let field_name = field.ident.as_ref().unwrap();
        let field_type = &field.ty;
        let mut skip: Option<usize> = None;
        let mut veclen: Option<String> = None;

        // look for attributes
        for at in &field.attrs {
            if let syn::Meta::NameValue(nv) = &at.meta {
                if nv.path.segments.len() == 1 {
                    let attr_name = nv.path.segments[0].ident.to_string();
                    if attr_name == "skip_bytes" {
                        if let syn::Expr::Lit(expr) = &nv.value {
                            if let syn::Lit::Int(nbytes) = &expr.lit {
                                // println!("reserved = {}", nbytes);
                                skip = nbytes.base10_parse::<usize>().ok()
                            }
                        }
                    } else if attr_name == "len" {
                        if let syn::Expr::Lit(expr) = &nv.value {
                            if let syn::Lit::Str(litstr) = &expr.lit {
                                // println!("len = {}", litstr.value());
                                veclen = Some(litstr.value());
                            }
                        }
                    }
                }
            }
        }

        if let Some(offset) = skip {
            parsing_statements.push(quote!{
                __parse_bin_derive_size += #offset;
            });
        }

        if let Some(varname) = veclen {
            let field_name_item = format_ident!("{}_item", field_name);

            enum FieldType<'a> {
                Vec(&'a syn::Type),
                CString,
            }

            let syn::Type::Path(tp) = &field_type else { panic!() };
            let single = tp.path.segments.iter().next().unwrap();

            let field_type = if &single.ident.to_string() == "Vec" {
                let syn::PathArguments::AngleBracketed(args) = &single.arguments else { panic!() };
                let firstarg = args.args.iter().next().unwrap();
                let syn::GenericArgument::Type(ty) = firstarg else { panic!() };
                FieldType::Vec(ty)
            } else if &single.ident.to_string() == "CString" {
                FieldType::CString
            } else {
                panic!("The len attribute is only allowed on Vec<_> or CString")
            };

            let varname_ident = syn::Ident::new(&varname, Span::call_site());


            match field_type {
                FieldType::Vec(field_type_item) => {
                    parsing_statements.push(quote!{
                        let mut #field_name = Vec::new();
                        for i in 0 .. #varname_ident.0 as usize {
                            let #field_name_item = <#field_type_item>::parse_len(&bytes[__parse_bin_derive_size..])?;
                            __parse_bin_derive_size += #field_name_item.1;
                            #field_name.push(#field_name_item.0);
                        }
                    });
                    combining_expressions.push(quote!(#field_name: #field_name));
                },
                FieldType::CString => {
                    parsing_statements.push(quote!{
                        let #field_name = CString::parse_len(&bytes[__parse_bin_derive_size .. __parse_bin_derive_size + #varname_ident.0 as usize])?;
                        __parse_bin_derive_size += #varname_ident.0 as usize;
                    });
                    combining_expressions.push(quote!(#field_name: #field_name.0));
                },
            }
        } else {
            parsing_statements.push(quote!{
                let #field_name = <#field_type>::parse_len(&bytes[__parse_bin_derive_size..])?;
                __parse_bin_derive_size += #field_name.1;
            });

            combining_expressions.push(quote!(#field_name: #field_name.0));
        }
    }

    quote! {
        impl ParseBin for #name {
            fn parse_len(bytes: &[u8]) -> Result<(Self, usize), ParseError> {
                let mut __parse_bin_derive_size: usize = 0;

                #(#parsing_statements)*

                let result = #name {
                    #(#combining_expressions),*
                };

                Ok((result, __parse_bin_derive_size))
            }
        }
    }.into()
}
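For orientation, a hypothetical struct using the two field attributes handled above: skip_bytes advances the parse offset before the annotated field is read, and len = "other_field" takes the element count from an earlier field.

// Sketch only: invented field layout, just to show how the derive is applied.
#[derive(ParseBin)]
struct DirEntryStub {
    location: Key,
    transid: u64,
    data_len: u16,
    name_len: u16,
    entry_type: u8,
    #[len = "name_len"]   // parse `name_len` bytes into the CString
    name: CString,
    #[skip_bytes = 3]     // skip 3 hypothetical padding bytes before parsing `flags`
    flags: u32,
}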
@@ -2,7 +2,6 @@ use std::rc::Rc;
 use crate::btrfs_structs::{ParseBin, Key, ChunkItem, Value, Superblock, ParseError, NODE_SIZE};
 use crate::btrfs_lookup::Tree;
-use crate::nodereader::NodeReader;

 #[derive(Debug, Clone)]
 pub struct AddressMap(pub Vec<(u64,u64,Vec<(u64,u64)>)>);
@@ -12,11 +11,10 @@ impl AddressMap {
     pub fn new(image: &[u8]) -> Result<AddressMap, ParseError> {
         let superblock = Superblock::parse(&image[0x10000..])?;
         let bootstrap_addr = AddressMap::from_superblock(&superblock)?;
-        let reader = Rc::new(NodeReader::with_addrmap(image, bootstrap_addr)?);

         let chunk_tree = Tree {
-            image,
-            reader,
+            image: image,
+            addr_map: Rc::new(bootstrap_addr),
             root_addr_log: superblock.chunk_root,
         };

@@ -91,7 +89,7 @@ impl LogToPhys for AddressMap {
     }
 }

-/*
 pub fn node_at_log<'a, T: LogToPhys>(image: &'a [u8], addr: &T, log: u64) -> Result<&'a [u8], ParseError> {
     if let Some(phys_addr) = addr.to_phys(log) {
         Ok(&image[phys_addr as usize .. phys_addr as usize + NODE_SIZE])
@@ -99,16 +97,7 @@ pub fn node_at_log<'a, T: LogToPhys>(image: &'a [u8], addr: &T, log: u64) -> Result<&'a [u8], ParseError> {
         err!("Logical address {:x} could not be translated to physical address", log)
     }
 }
-*/

 pub trait LogToPhys {
     fn to_phys(&self, log: u64) -> Option<u64>;
-
-    fn node_at_log<'a>(&self, image: &'a [u8], log: u64) -> Result<&'a [u8], ParseError> {
-        if let Some(phys_addr) = self.to_phys(log) {
-            Ok(&image[phys_addr as usize .. phys_addr as usize + NODE_SIZE])
-        } else {
-            err!("Logical address {:x} could not be translated to physical address", log)
-        }
-    }
 }
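On the left-hand side of this hunk, address translation is folded into LogToPhys as a default method, so an address translator only has to supply to_phys; a toy impl as illustration:

// Sketch only: a hypothetical identity mapping, e.g. for tests.
struct IdentityMap;

impl LogToPhys for IdentityMap {
    fn to_phys(&self, log: u64) -> Option<u64> {
        Some(log) // pretend logical and physical addresses coincide
    }
    // node_at_log is inherited from the trait's default method.
}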
297
src/btrfs_lookup.rs
Normal file
297
src/btrfs_lookup.rs
Normal file
@ -0,0 +1,297 @@
|
|||||||
|
use std::rc::Rc;
|
||||||
|
use std::ops::{Deref, RangeBounds};
|
||||||
|
|
||||||
|
use crate::btrfs_structs::{Leaf, Key, Item, InteriorNode, Node, ParseError, ParseBin, Value, Superblock, ItemType};
|
||||||
|
use crate::addrmap::{node_at_log, LogToPhys, AddressMap};
|
||||||
|
|
||||||
|
/// represents a B-Tree inside a filesystem image. Can be used to look up keys,
|
||||||
|
/// and handles the tree traversal and the virtual address translation.
|
||||||
|
pub struct Tree<'a> {
|
||||||
|
pub image: &'a [u8],
|
||||||
|
pub addr_map: Rc<AddressMap>,
|
||||||
|
pub root_addr_log: u64,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> Tree<'a> {
|
||||||
|
pub fn new<T: Into<u64>>(image: &'a [u8], tree_id: T) -> Result<Tree<'a>, ParseError> {
|
||||||
|
let addr_map = Rc::new(AddressMap::new(image)?);
|
||||||
|
let superblock = Superblock::parse(&image[0x10000..])?;
|
||||||
|
|
||||||
|
let root_tree = Tree {
|
||||||
|
image,
|
||||||
|
addr_map: Rc::clone(&addr_map),
|
||||||
|
root_addr_log: superblock.root
|
||||||
|
};
|
||||||
|
let tree_root_item = root_tree.find_key(Key::new(tree_id.into(), ItemType::Root, 0))?;
|
||||||
|
|
||||||
|
let root_addr_log = match tree_root_item.value {
|
||||||
|
Value::Root(root) => root.bytenr,
|
||||||
|
_ => return Err("root item invalid".into())
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(Tree { image, addr_map, root_addr_log })
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn root(image: &'a [u8]) -> Result<Tree<'a>, ParseError> {
|
||||||
|
let addr_map = Rc::new(AddressMap::new(image)?);
|
||||||
|
let superblock = Superblock::parse(&image[0x10000..])?;
|
||||||
|
|
||||||
|
Ok(Tree { image, addr_map, root_addr_log: superblock.root })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/***** looking up keys *****/
|
||||||
|
|
||||||
|
impl Leaf {
|
||||||
|
pub fn find_key(&self, key: Key) -> Option<Item> {
|
||||||
|
self.items
|
||||||
|
.iter()
|
||||||
|
.find(|x|x.key == key)
|
||||||
|
.map(|x|x.clone())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn find_key_or_previous(&self, key: Key) -> Option<Item> {
|
||||||
|
self.items
|
||||||
|
.iter()
|
||||||
|
.take_while(|x|x.key <= key)
|
||||||
|
.last()
|
||||||
|
.map(|x|x.clone())
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
impl InteriorNode {
|
||||||
|
pub fn find_key_or_previous(&self, key: Key) -> Option<u64> {
|
||||||
|
self.children
|
||||||
|
.iter()
|
||||||
|
.take_while(|x|x.key <= key)
|
||||||
|
.last()
|
||||||
|
.map(|x|x.ptr)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn find_key_in_node<T: LogToPhys>(image: &[u8], addr: &T, root_addr_log: u64, key: Key) -> Result<Item, ParseError> {
|
||||||
|
let node = Node::parse(node_at_log(image, addr, root_addr_log)?)?;
|
||||||
|
|
||||||
|
match node {
|
||||||
|
Node::Interior(interior_node) => {
|
||||||
|
let next_node_log = interior_node.find_key_or_previous(key).unwrap();
|
||||||
|
find_key_in_node(image, addr, next_node_log, key)
|
||||||
|
},
|
||||||
|
Node::Leaf(leaf) => {
|
||||||
|
leaf.find_key(key).ok_or(
|
||||||
|
error!(
|
||||||
|
"Item with key ({},{:?},{}) was not found in the leaf at logical address 0x{:x}",
|
||||||
|
key.key_id, key.key_type, key.key_offset, root_addr_log)
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Tree<'_> {
|
||||||
|
pub fn find_key(&self, key: Key) -> Result<Item, ParseError> {
|
||||||
|
find_key_in_node(self.image, self.addr_map.deref(), self.root_addr_log, key)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/***** iterator *****/
|
||||||
|
|
||||||
|
pub struct RangeIter<'a, R: RangeBounds<Key>, F: Fn(Key) -> Key = fn(Key) -> Key> {
|
||||||
|
tree: &'a Tree<'a>,
|
||||||
|
|
||||||
|
// path to the last returned item
|
||||||
|
nodes: Vec<InteriorNode>,
|
||||||
|
leaf: Option<Box<Leaf>>,
|
||||||
|
indices: Vec<usize>,
|
||||||
|
|
||||||
|
bounds: R,
|
||||||
|
skip_fn: F,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Tree<'_> {
|
||||||
|
pub fn iter<'a>(&'a self) -> RangeIter<'a> {
|
||||||
|
self.range(None, None)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn range<'a>(&'a self, lower: Option<Key>, upper: Option<Key>) -> RangeIter<'a> {
|
||||||
|
RangeIter {
|
||||||
|
tree: self,
|
||||||
|
nodes: Vec::new(),
|
||||||
|
leaf: None,
|
||||||
|
indices: Vec::new(), // in nodes and leaf
|
||||||
|
lower_limit: lower,
|
||||||
|
upper_limit: upper,
|
||||||
|
skip_fn: |x|x
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn range_id<'a>(&'a self, id: u64) -> RangeIter<'a> {
|
||||||
|
if id == u64::MAX {
|
||||||
|
self.range(
|
||||||
|
Some(Key::new(id, ItemType::Invalid, 0)),
|
||||||
|
None
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
self.range(
|
||||||
|
Some(Key::new(id, ItemType::Invalid, 0)),
|
||||||
|
Some(Key::new(id+1, ItemType::Invalid, 0))
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// given a tree, a range of indices, and two "skip functions", produces a double
|
||||||
|
/// ended iterator which iterates through the keys contained in the range, in ascending
|
||||||
|
/// or descending order.
|
||||||
|
|
||||||
|
/// the skip functions are ignored for now, but are intended as an optimization:
|
||||||
|
/// after a key `k` was returned by the iterator (or the reverse iterator), all keys
|
||||||
|
/// strictly lower than `forward_skip_fn(k)` are skipped (resp. all keys strictly above
|
||||||
|
/// `backward_skip_fn` are skipped.
|
||||||
|
pub fn range_with_skip<'a, R, F>(&'a self, range: R, forward_skip_fn: F, backward_skip_fn: F) -> RangeIter<'a, F>
|
||||||
|
where
|
||||||
|
R: RangeBounds<Key>,
|
||||||
|
F: Fn(Key) -> Key {
|
||||||
|
        RangeIter {
            tree: self,
            nodes: Vec::new(),
            leaf: None,
            indices: Vec::new(),
        }
    }
}

impl<F: Fn(Key) -> Key> RangeIter<'_, F> {
    fn move_down_and_get_first_item(&mut self, mut node_addr: u64) -> Option<Item> {
        loop {
            let node = Node::parse(node_at_log(self.tree.image, self.tree.addr_map.deref(), node_addr).ok()?).ok()?;
            match node {
                Node::Interior(int_node) => {
                    node_addr = int_node.children.first()?.ptr;
                    self.nodes.push(int_node);
                    self.indices.push(0);
                },
                Node::Leaf(leaf_node) => {
                    let result = leaf_node.items.first()?.clone();
                    self.leaf = Some(Box::new(leaf_node));
                    self.indices.push(0);
                    return Some(result);
                },
            }
        }
    }

    fn move_down_and_get_item_or_previous(&mut self, mut node_addr: u64, key: Key) -> Option<Item> {
        loop {
            let node = Node::parse(node_at_log(self.tree.image, self.tree.addr_map.deref(), node_addr).ok()?).ok()?;

            match node {
                Node::Interior(int_node) => {
                    let (i, new_node_ptr) = int_node
                        .children
                        .iter()
                        .enumerate()
                        .take_while(|(_,bp)|bp.key <= key)
                        .last()?;

                    node_addr = new_node_ptr.ptr;
                    self.nodes.push(int_node);
                    self.indices.push(i);
                },
                Node::Leaf(leaf_node) => {
                    let (i, result) = leaf_node
                        .items
                        .iter()
                        .enumerate()
                        .take_while(|(_,item)|item.key <= key)
                        .last()?;

                    let result_cloned = result.clone();
                    self.leaf = Some(Box::new(leaf_node));
                    self.indices.push(i);
                    return Some(result_cloned);
                },
            }
        }
    }
}

impl<F: Fn(Key) -> Key> Iterator for RangeIter<'_, F> {
    type Item = Item;

    // for now we just silently stop when we encounter an error, maybe that isn't the best solution
    fn next(&mut self) -> Option<Item> {
        if self.leaf.is_none() && self.nodes.len() == 0 {
            // first item
            // finding the first item is a bit tricky
            // if there is a lower limit, the B+ tree only allows us to either find the item
            // or the previous one if there is no exact match; in the latter case, go one further

            let result = if let Some(lim) = self.lower_limit {
                let first_res = self.move_down_and_get_item_or_previous(self.tree.root_addr_log, lim);
                if let Some(item) = first_res {
                    if item.key == lim {
                        // found exactly the limit, that's the easy case
                        Some(item)
                    } else {
                        // found a previous item; so we want the next one
                        self.next()
                    }
                } else {
                    // did not find an item, so everything must come after lower limit
                    // just get the first
                    self.move_down_and_get_first_item(self.tree.root_addr_log)
                }
            } else {
                // there is no lower limit, so also just get the first
                self.move_down_and_get_first_item(self.tree.root_addr_log)
            };

            result.filter(|item|self.upper_limit.is_none() || item.key < self.upper_limit.unwrap())
        } else if self.leaf.is_none() {
            // already through the iterator
            return None;
        } else {
            let height = self.indices.len(); // must be at least 1
            let leaf = self.leaf.as_ref().unwrap();

            self.indices[height-1] += 1;
            if let Some(item) = leaf.items.get(self.indices[height-1]) {
                // there's a next item in the same leaf
                if self.upper_limit.is_none() || item.key < self.upper_limit.unwrap() {
                    return Some(item.clone());
                } else {
                    return None;
                }
            } else if height == 1 {
                // the tree has height 1 and we're through the (only) leaf, there's nothing left
                return None;
            } else {
                // try to advance in one of the higher nodes
                self.leaf = None;
                self.indices.pop();
                let mut level = height - 2;

                // go up until we can move forward in a node
                let node_addr = loop {
                    let node = &self.nodes[level];

                    self.indices[level] += 1;
                    if let Some(blockptr) = node.children.get(self.indices[level]) {
                        break blockptr.ptr;
                    } else {
                        if level == 0 {
                            return None;
                        }
                        self.indices.pop();
                        self.nodes.pop();
                        level -= 1;
                    }
                };

                // find the first item under this node
                return self.move_down_and_get_first_item(node_addr)
                    .filter(|item|self.upper_limit.is_none() || item.key < self.upper_limit.unwrap())
            }
        }
    }
}
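For orientation, here is a minimal usage sketch of this range iterator, assuming a `range` method on `Tree` that builds a `RangeIter` from a key range and fills in `lower_limit`/`upper_limit` (the constructor itself lies outside the lines shown here); the tree id and variable names are illustrative only:

// Hedged sketch: walk all DirIndex entries of one directory inode.
// Assumes items come back ordered by Key, as the iterator above implies.
let tree = Tree::new(image, 5u64)?; // 5 = filesystem tree id (assumption)
let start = Key { key_id: dir_inode, key_type: ItemType::DirIndex, key_offset: 0 };
for item in tree.range(start..) {
    if item.key.key_id != dir_inode || item.key.key_type != ItemType::DirIndex {
        break; // left the requested (inode, item type) range
    }
    println!("directory entry at index {}", item.key.key_offset);
}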
@ -1,5 +1,5 @@
-use btrfs_parse_derive::AllVariants;
+use binparse_derive::AllVariants;
-use btrfs_parse_derive::ParseBin;
+use binparse_derive::ParseBin;
 use std::fmt;
 use std::error;
 use std::ffi::CString;
@ -12,11 +12,11 @@ pub const NODE_SIZE: usize = 0x4000;
 #[derive(Debug,Clone,Copy,AllVariants,PartialEq,Eq,PartialOrd,Ord)]
 #[repr(u8)]
 pub enum ItemType {
-    Invalid = 0x00, // invalid, but seems to exist?
+    Invalid = 0x00, // invalid
     Inode = 0x01, // implemented
     Ref = 0x0c, // implemented
     ExtRef = 0x0d,
-    XAttr = 0x18, // TODO
+    XAttr = 0x18,
     VerityDesc = 0x24,
     VerityMerkle = 0x25,
     Orphan = 0x30,
@ -25,12 +25,12 @@ pub enum ItemType {
     Dir = 0x54, // implemented (better with len feature; allow multiple?)
     DirIndex = 0x60, // implemented
     ExtentData = 0x6c, // implemented
-    ExtentCsum = 0x80, // TODO
+    ExtentCsum = 0x80,
     Root = 0x84, // implemented
-    RootBackRef = 0x90, // implemented
+    RootBackRef = 0x90,
-    RootRef = 0x9c, // implemented
+    RootRef = 0x9c,
-    Extent = 0xa8, // implemented (with only one version of extra data!!)
+    Extent = 0xa8, // implemented (with only one version of extra data)
-    Metadata = 0xa9, // implemented (with only one version of extra data!!)
+    Metadata = 0xa9, // implemented (with only one version of extra data)
     TreeBlockRef = 0xb0,
     ExtentDataRef = 0xb2,
     ExtentRefV0 = 0xb4,
@ -53,7 +53,6 @@ pub enum ItemType {
     UUIDSubvol = 0xfb, // implemented
     UUIDReceivedSubvol = 0xfc,
     String = 0xfd,
-    InvalidMax = 0xff, // invalid
 }

 #[allow(unused)]
@ -74,9 +73,6 @@ impl Key {
     }
 }

-pub const ZERO_KEY: Key = Key {key_id: 0, key_type: ItemType::Invalid, key_offset: 0};
-pub const LAST_KEY: Key = Key {key_id: 0xffff_ffff_ffff_ffff, key_type: ItemType::InvalidMax, key_offset: 0xffff_ffff_ffff_ffff};
-
 #[allow(unused)]
 #[derive(Debug,Clone)]
 pub enum Value {
@ -94,7 +90,6 @@ pub enum Value {
     DevExtent(DevExtentItem),
     ExtentData(ExtentDataItem),
     Ref(RefItem),
-    RootRef(RootRefItem),
     Unknown(Vec<u8>),
 }

@ -309,70 +304,68 @@ pub struct RootItem {
     pub otime: Time,
     pub stime: Time,
     pub rtime: Time,

-    data: Vec<u8>,
 }

 #[allow(unused)]
 #[derive(Debug,Clone,ParseBin)]
 pub struct DirItem {
-    pub location: Key,
+    location: Key,
-    pub transid: u64,
+    transid: u64,
-    pub data_len: u16,
+    data_len: u16,
-    pub name_len: u16,
+    name_len: u16,
-    pub dir_type: u8,
+    dir_type: u8,

-    #[len = "name_len"]
+    // #[len = "name_len"]
-    pub name: CString,
+    name: CString,
 }
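The `#[len = "name_len"]` attribute (active on the left-hand side, commented out on the right) presumably tells the ParseBin derive to read exactly `name_len` bytes for the field that follows. A hand-rolled sketch of the same idea for the DirItem name, assuming the on-disk field order matches the struct and all integers are little-endian; `parse_dir_item_name` is a made-up helper, not part of either commit:

// Hypothetical equivalent of `#[len = "name_len"]` for DirItem:
// read the fixed-size header, then take exactly `name_len` bytes as the name.
fn parse_dir_item_name(bytes: &[u8]) -> Option<Vec<u8>> {
    // location key (17 bytes) + transid (8) + data_len (2) = 27, then name_len (2) and dir_type (1)
    if bytes.len() < 30 {
        return None;
    }
    let name_len = u16::from_le_bytes([bytes[27], bytes[28]]) as usize;
    bytes.get(30..30 + name_len).map(|name| name.to_vec())
}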
 #[allow(unused)]
 #[derive(Debug,Clone,ParseBin)]
 pub struct FreeSpaceInfoItem {
-    pub extent_count: u32,
+    extent_count: u32,
-    pub flags: u32,
+    flags: u32,
 }

 #[allow(unused)]
 #[derive(Debug,Clone,ParseBin)]
 pub struct UUIDSubvolItem {
-    pub subvol_id: u64,
+    subvol_id: u64,
 }

 #[allow(unused)]
 #[derive(Debug,Clone,ParseBin)]
 pub struct DevItem {
-    pub devid: u64,
+    devid: u64,
-    pub total_bytes: u64,
+    total_bytes: u64,
-    pub bytes_used: u64,
+    bytes_used: u64,
-    pub io_align: u32,
+    io_align: u32,
-    pub io_width: u32,
+    io_width: u32,
-    pub sector_size: u32,
+    sector_size: u32,
-    pub dev_type: u64,
+    dev_type: u64,
-    pub generation: u64,
+    generation: u64,
-    pub start_offset: u64,
+    start_offset: u64,
-    pub dev_group: u32,
+    dev_group: u32,
-    pub seek_speed: u8,
+    seek_speed: u8,
-    pub bandwidth: u8,
+    bandwidth: u8,
-    pub uuid: UUID,
+    uuid: UUID,
-    pub fsid: UUID,
+    fsid: UUID,
 }

 #[allow(unused)]
 #[derive(Debug,Clone,ParseBin)]
 pub struct DevExtentItem {
-    pub chunk_tree: u64,
+    chunk_tree: u64,
-    pub chunk_objectid: u64,
+    chunk_objectid: u64,
-    pub chunk_offset: u64,
+    chunk_offset: u64,
-    pub length: u64,
+    length: u64,
-    pub chunk_tree_uuid: UUID,
+    chunk_tree_uuid: UUID,
 }

 #[allow(unused)]
 #[derive(Debug,Clone)]
 pub struct ExtentDataItem {
-    pub header: ExtentDataHeader,
+    header: ExtentDataHeader,
-    pub data: ExtentDataBody,
+    data: ExtentDataBody,
 }

 #[allow(unused)]
@ -385,42 +378,31 @@ pub enum ExtentDataBody {
 #[allow(unused)]
 #[derive(Debug,Clone,ParseBin)]
 pub struct ExternalExtent {
-    pub disk_bytenr: u64,
+    disk_bytenr: u64,
-    pub disk_num_bytes: u64,
+    disk_num_bytes: u64,
-    pub offset: u64,
+    offset: u64,
-    pub num_bytes: u64,
+    num_bytes: u64,
 }

 #[allow(unused)]
 #[derive(Debug,Clone,ParseBin)]
 pub struct ExtentDataHeader {
-    pub generation: u64,
+    generation: u64,
-    pub ram_bytes: u64,
+    ram_bytes: u64,
-    pub compression: u8,
+    compression: u8,
-    pub encryption: u8,
+    encryption: u8,
-    pub other_encoding: u16,
+    other_encoding: u16,
-    pub extent_type: u8,
+    extent_type: u8,
 }

 #[allow(unused)]
 #[derive(Debug,Clone,ParseBin)]
 pub struct RefItem {
-    pub index: u64,
+    index: u64,
-    pub name_len: u16,
+    name_len: u16,

-    #[len = "name_len"]
+    // #[len = "name_len"]
-    pub name: CString,
+    name: Vec<u8>,
-}
-
-#[allow(unused)]
-#[derive(Debug,Clone,ParseBin)]
-pub struct RootRefItem {
-    pub directory: u64,
-    pub index: u64,
-    pub name_len: u16,
-
-    #[len = "name_len"]
-    pub name: CString,
 }

 #[allow(unused)]
@ -544,7 +526,7 @@ impl ParseBin for CString {
     fn parse_len(bytes: &[u8]) -> Result<(Self, usize), ParseError> {
         let mut chars = Vec::from(bytes);
         chars.push(0);
-        Ok((CString::from_vec_with_nul(chars).unwrap_or(CString::new("<invalid string>").unwrap()), bytes.len()))
+        Ok((CString::from_vec_with_nul(chars).unwrap(), bytes.len()))
     }
 }
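A side note on this hunk: `CString::from_vec_with_nul` only accepts a single trailing NUL, so the plain `unwrap()` on the right can panic on a name that contains an interior NUL byte, while the `unwrap_or(...)` fallback on the left degrades to a placeholder string instead. A tiny standalone illustration of the failure mode:

use std::ffi::CString;

// An interior NUL byte makes from_vec_with_nul return an error.
let mut chars = b"bad\0name".to_vec();
chars.push(0);
assert!(CString::from_vec_with_nul(chars).is_err());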
@ -561,10 +543,7 @@ impl From<u8> for ItemType {
         let variants = ItemType::all_variants();
         match variants.binary_search_by_key(&value, |x|u8::from(*x)) {
             Ok(idx) => variants[idx],
-            Err(_) => {
+            Err(_) => ItemType::Invalid,
-                println!("Unknown item type: {}", value);
-                ItemType::Invalid
-            },
         }
     }
 }
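This conversion relies on `all_variants()` (from the AllVariants derive) listing the variants in ascending discriminant order, since `binary_search_by_key` only works on sorted input; any byte without a matching variant falls back to `Invalid`. A small sketch of the expected round trip, using discriminants visible in the enum above:

assert_eq!(ItemType::from(0x84u8), ItemType::Root);  // known discriminant maps to its variant
assert_eq!(u8::from(ItemType::ExtentData), 0x6c);    // and back to the raw byte
// any unknown byte is mapped to ItemType::Invalid rather than panicking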
@ -575,17 +554,8 @@ impl ParseBin for ItemType {
     }
 }

-fn parse_check_size<T: ParseBin>(bytes: &[u8]) -> Result<T, ParseError> {
-    let (result, real_len) = T::parse_len(bytes)?;
-    if real_len != bytes.len() {
-        eprintln!("{} parsing incomplete! Parsed {} of {} bytes", std::any::type_name::<T>(), real_len, bytes.len());
-    }
-    Ok(result)
-}
-
 impl ParseBin for Node {
     fn parse_len(bytes: &[u8]) -> Result<(Node, usize), ParseError> {

         if bytes.len() < 0x65 {
             return err!("Not enough data to parse node header");
         }
@ -616,45 +586,40 @@ impl ParseBin for Node {

         let value = match key.key_type {
             ItemType::BlockGroup =>
-                Value::BlockGroup(parse_check_size(data_slice)?),
+                Value::BlockGroup(BlockGroupItem::parse(data_slice)?),
             ItemType::Metadata => {
-                let item: ExtentItem = parse_check_size(data_slice)?;
+                let item = ExtentItem::parse(data_slice)?;
                 if item.flags != 2 || item.refs > 1 {
                     println!("Metadata item with refs = {}, flags = {}, data = {:x?}", item.refs, item.flags, &data_slice[0x18..]);
                 }
                 Value::Extent(item)
             },
             ItemType::Extent =>
-                Value::Extent(parse_check_size(data_slice)?),
+                Value::Extent(ExtentItem::parse(data_slice)?),
             ItemType::Inode =>
-                Value::Inode(parse_check_size(data_slice)?),
+                Value::Inode(InodeItem::parse(data_slice)?),
             ItemType::Root =>
-                Value::Root(parse_check_size(data_slice)?),
+                Value::Root(RootItem::parse(data_slice)?),
             ItemType::Dir =>
-                Value::Dir(parse_check_size(data_slice)?),
+                Value::Dir(DirItem::parse(data_slice)?),
             ItemType::DirIndex =>
-                Value::DirIndex(parse_check_size(data_slice)?),
+                Value::DirIndex(DirItem::parse(data_slice)?),
             ItemType::Chunk =>
-                Value::Chunk(parse_check_size(data_slice)?),
+                Value::Chunk(ChunkItem::parse(data_slice)?),
             ItemType::FreeSpaceInfo =>
-                Value::FreeSpaceInfo(parse_check_size(data_slice)?),
+                Value::FreeSpaceInfo(FreeSpaceInfoItem::parse(data_slice)?),
             ItemType::FreeSpaceExtent =>
                 Value::FreeSpaceExtent,
             ItemType::UUIDSubvol =>
-                Value::UUIDSubvol(parse_check_size(data_slice)?),
+                Value::UUIDSubvol(UUIDSubvolItem::parse(data_slice)?),
             ItemType::Dev =>
-                Value::Dev(parse_check_size(data_slice)?),
+                Value::Dev(DevItem::parse(data_slice)?),
             ItemType::DevExtent =>
-                Value::DevExtent(parse_check_size(data_slice)?),
+                Value::DevExtent(DevExtentItem::parse(data_slice)?),
             ItemType::ExtentData =>
-                Value::ExtentData(parse_check_size(data_slice)?),
+                Value::ExtentData(ExtentDataItem::parse(data_slice)?),
-            ItemType::Ref => {
+            ItemType::Ref =>
-                Value::Ref(parse_check_size(data_slice)?)
+                Value::Ref(RefItem::parse(data_slice)?),
-            }
-            ItemType::RootRef =>
-                Value::RootRef(parse_check_size(data_slice)?),
-            ItemType::RootBackRef =>
-                Value::RootRef(parse_check_size(data_slice)?),
             _ =>
                 Value::Unknown(Vec::from(data_slice)),
         };
@ -754,7 +719,6 @@ impl fmt::Debug for Checksum {
     }
 }

-#[macro_export]
 macro_rules! key {
     ($arg1:expr) => {
         btrfs_structs::Key { key_id: $arg1, key_type: btrfs_structs::ItemType::Invalid, key_offset: 0 }
@ -767,4 +731,4 @@ macro_rules! key {
     };
 }

-//pub(crate) use key;
+pub(crate) use key;
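A short usage note for the `key!` macro, assuming the elided middle arms fill in `key_type` and `key_offset` when more arguments are given (only the one-argument arm is visible in these hunks):

// Hedged sketch of the one-argument form: a search key with only the object id set.
let k = key!(0x100);
assert_eq!(k.key_id, 0x100);
assert_eq!(k.key_type, btrfs_structs::ItemType::Invalid);
assert_eq!(k.key_offset, 0);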
@ -3,11 +3,6 @@ pub mod util;
 pub mod btrfs_structs;
 pub mod btrfs_lookup;
 pub mod addrmap;
-pub mod nodereader;
-pub mod http_tree;
-pub mod render_common;
-pub mod render_tree;
-pub mod main_error;

 #[cfg(test)]
 mod test;
@ -1,13 +1,14 @@
 use std::{
-    collections::HashMap, env, fs::{File, OpenOptions}, iter,
+    iter,
+    env,
+    fs::OpenOptions,
+    collections::HashMap,
 };
-use memmap2::MmapOptions;
+use memmap2::Mmap;
 use rouille::{Request, Response, router};
-use btrfs_explorer::{
+use parsebtrfs::{
-    btrfs_structs::{TreeID, Value::Extent, Value::BlockGroup, NODE_SIZE, ItemType},
+    btrfs_structs::{TreeID, Value::Extent, Value::BlockGroup, ParseError, NODE_SIZE, ItemType},
     btrfs_lookup::Tree,
-    addrmap::AddressMap,
-    main_error::MainError,
 };

 const COLORS: &[&str] = &["#e6194b", "#3cb44b", "#ffe119", "#4363d8", "#f58231", "#911eb4", "#46f0f0", "#f032e6", "#bcf60c", "#fabebe", "#008080", "#e6beff", "#9a6324", "#fffac8", "#800000", "#aaffc3", "#808000", "#ffd8b1", "#000075", "#808080", "#000000"];
@ -15,13 +16,12 @@ const COLORS: &[&str] = &["#e6194b", "#3cb44b", "#ffe119", "#4363d8", "#f58231",
 fn main() -> Result<(), MainError> {
     let filename = env::args().skip(1).next().ok_or("Argument required")?;

-    /*
     let file = OpenOptions::new().read(true).open(filename)?;
     let image = unsafe { Mmap::map(&file)? };
-    */

-    let file = OpenOptions::new().read(true).open(filename)?;
+    const O_DIRECT: i32 = 0x4000;
-    let image = unsafe { MmapOptions::new().len(493921239040usize).map(&file)? };
+    // let file = OpenOptions::new().read(true).custom_flags(O_DIRECT).open(filename)?;
+    // let image = unsafe { MmapOptions::new().len(493921239040usize).map(&file)? };

     // return Ok(());
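Both variants follow the same pattern: open the image read-only and memory-map it so the parser can treat it as a `&[u8]`. A minimal self-contained sketch of that pattern with memmap2 (the explicit `.len(...)` in the MmapOptions variant is typically only needed when mapping a block device, whose length the default mapping cannot discover from file metadata; the path below is made up):

use std::fs::OpenOptions;
use memmap2::Mmap;

fn map_image(path: &str) -> std::io::Result<Mmap> {
    let file = OpenOptions::new().read(true).open(path)?;
    // Safety: the mapping is only read, and the image file is not expected
    // to be truncated while the server is running.
    unsafe { Mmap::map(&file) }
}

// let image = map_image("/path/to/filesystem.img")?;
// let bytes: &[u8] = &image; // Mmap dereferences to [u8]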
@ -37,21 +37,8 @@ fn main() -> Result<(), MainError> {
     rouille::start_server("127.0.0.1:8080", move |request| {
         router!(
             request,
-            (GET) ["/"] =>
+            (GET) ["/"] => http_main_boxes(&image, request),
-                http_main_boxes(&image, request),
-            (GET) ["/root"] =>
-                btrfs_explorer::http_tree::http_root(&image, None, request),
-            (GET) ["/tree/{tree}", tree: String] =>
-                btrfs_explorer::http_tree::http_tree(&image, &tree, None, request.get_param("key").as_deref(), request).unwrap(),
-            (GET) ["/tree/{tree}/{key}", tree: String, key: String] =>
-                btrfs_explorer::http_tree::http_tree(&image, &tree, None, Some(&key), request).unwrap(),
-            (GET) ["/tree/{tree}?key={key}", tree: String, key: String] =>
-                btrfs_explorer::http_tree::http_tree(&image, &tree, None, Some(&key), request).unwrap(),
-            (GET) ["/tree/{tree}/{method}/{key}", tree: String, method: String, key: String] =>
-                btrfs_explorer::http_tree::http_tree(&image, &tree, Some(&method), Some(&key), request).unwrap(),
             (GET) ["/favicon.ico"] => Response::empty_404(),
-            (GET) ["/style.css"] => Response::from_file("text/css", File::open("style.css").unwrap()),
-            (GET) ["/htmx.min.js"] => Response::from_file("text/css", File::open("htmx.min.js").unwrap()),
             _ => Response::empty_404(),
         )
     });
@ -59,9 +46,17 @@ fn main() -> Result<(), MainError> {

 static CIRCLE_IMAGE: &str =
     "data:image/png;base64,\
-    iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAAAP0lEQVQY02NgoBn4//+//P///yf9\
+    iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAABhGlDQ1BJQ0MgcHJvZmlsZQAAKJF9\
-    ////DRRP+v//vzw2hZP+Y4JJ2BS+waLwDUyeiVinIStchkV+GfmeoRoAAJqLWnEf4UboAAAAAElF\
+    kT1Iw0AcxV9bpUUqInYo4pChOtnFLxxrFYpQIdQKrTqYXPoFTRqSFBdHwbXg4Mdi1cHFWVcHV0EQ\
-    TkSuQmCC";
+    /ABxdXFSdJES/5cUWsR4cNyPd/ced+8Af7PKVLMnAaiaZWRSSSGXXxWCrwhhEAFEMS0xU58TxTQ8\
+    x9c9fHy9i/Ms73N/jn6lYDLAJxAnmG5YxBvEM5uWznmfOMLKkkJ8Tjxu0AWJH7kuu/zGueSwn2dG\
+    jGxmnjhCLJS6WO5iVjZU4inimKJqlO/Puaxw3uKsVuusfU/+wnBBW1nmOs0RpLCIJYgQIKOOCqqw\
+    EKdVI8VEhvaTHv5hxy+SSyZXBYwcC6hBheT4wf/gd7dmcXLCTQongd4X2/4YBYK7QKth29/Htt06\
+    AQLPwJXW8deawOwn6Y2OFjsCBraBi+uOJu8BlztA9EmXDMmRAjT9xSLwfkbflAeGboG+Nbe39j5O\
+    H4AsdZW+AQ4OgbESZa97vDvU3du/Z9r9/QChS3K5hXof0gAAAAZiS0dEAP8A/wD/oL2nkwAAAAlw\
+    SFlzAAAuIwAALiMBeKU/dgAAAAd0SU1FB+cIEQMcKM7EsV8AAAA/SURBVBjTY2CgGfj//7/8////\
+    J/3///8NFE/6//+/PDaFk/5jgknYFL7BovANTJ6JWKchK1yGRX4Z+Z6hGgAAmotacR/hRugAAAAA\
+    SUVORK5CYII=";

 static EXPLANATION_TEXT: &str = "\
 <h3>Chunks</h3>
@ -212,7 +207,6 @@ fn http_main_boxes(image: &[u8], _req: &Request) -> Response {
     };

     // header
-    let addr_map: &AddressMap = extent_tree.reader.as_ref().addr_map();
     result.push_str(
         &format!(
             "<h3 style=\"text-align: center;\">{:x} - {:x} ({}, {})</h3><p>Physical: {}</p>\n",
@ -232,8 +226,8 @@ fn http_main_boxes(image: &[u8], _req: &Request) -> Response {
             0x04 => "Metadata",
             _ => "???",
         },
-        match addr_map.0.binary_search_by_key(&bg.key.key_id, |x|x.0) {
+        match extent_tree.addr_map.as_ref().0.binary_search_by_key(&bg.key.key_id, |x|x.0) {
-            Ok(i) => format!("{:x?}", &addr_map.0[i].2),
+            Ok(i) => format!("{:x?}", &extent_tree.addr_map.as_ref().0[i].2),
             _ => String::from(""),
         }
     )
@ -271,3 +265,149 @@ fn http_main_boxes(image: &[u8], _req: &Request) -> Response {

     Response::html(result)
 }

+// ----- Error handling -----
+
+pub struct MainError(String);
+
+impl std::error::Error for MainError {}
+
+impl std::fmt::Debug for MainError {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}", &self.0)
+    }
+}
+
+impl std::fmt::Display for MainError {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}", &self.0)
+    }
+}
+
+impl From<String> for MainError {
+    fn from(value: String) -> MainError {
+        MainError(value)
+    }
+}
+
+impl From<&str> for MainError {
+    fn from(value: &str) -> MainError {
+        MainError::from(String::from(value))
+    }
+}
+
+impl From<ParseError> for MainError {
+    fn from(value: ParseError) -> MainError {
+        MainError::from(format!("BTRFS format error: {value}"))
+    }
+}
+
+impl From<std::io::Error> for MainError {
+    fn from(value: std::io::Error) -> MainError {
+        MainError::from(format!("IO error: {value}"))
+    }
+}
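These `From` impls are what make `?` work in `fn main() -> Result<(), MainError>`: the `?` operator routes every intermediate error type through `From::from` into `MainError`. A tiny illustration under that assumption (file name and helper names are made up):

fn open_image() -> Result<std::fs::File, MainError> {
    // std::io::Error -> MainError via the From impl above
    Ok(std::fs::File::open("image.bin")?)
}

fn need_argument(arg: Option<String>) -> Result<String, MainError> {
    // &str -> MainError via From<&str>, the same shape as `ok_or("Argument required")?` in main
    Ok(arg.ok_or("Argument required")?)
}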

+/*
+fn main() -> Result<(), std::io::Error> {
+    let file = File::open("../image")?;
+    let image = unsafe { Mmap::map(&file)? };
+
+    let addr = AddressTranslation::new(&image);
+
+    rouille::start_server("127.0.0.1:8080", move |request| {
+        http_main_list(&image, &addr, request)
+    });
+}
+
+
+fn http_main_list(image: &[u8], addr: &AddressTranslation, req: &Request) -> Response {
+    let chunk_offset = 0x02500000;
+    let nodes_in_chunk = 2048;
+    let mut result = String::new();
+
+    result.push_str("<body>\n");
+
+    for i in 0..nodes_in_chunk {
+        let node = read_node(&image, chunk_offset + i*0x4000);
+
+        let active = ACTIVE_NODES.contains(&(i*0x4000));
+        let style = if active { "color:black;" } else { "color:lightgray;" };
+
+        let newline = format!("<p style=\"{}\">{:x} {} {} {}\n<ul>\n",
+            style,
+            chunk_offset + i*0x4000,
+            node.level,
+            node.items.len(),
+            node.generation);
+        result.push_str(&newline);
+
+        for item in &node.items {
+            let newline = format!("<li style=\"{}\">{:016x} {:?} {:x}</li>\n",
+                style,
+                item.key.key_id,
+                item.key.key_type,
+                item.key.key_offset);
+            result.push_str(&newline);
+        }
+
+        result.push_str("</ul></p>\n");
+    }
+
+    Response::html(result)
+}
+*/
+
+/*
+fn read_node_log(image: &[u8], trans: &AddressTranslation, log: u64) -> Option<Box<BtrfsNode>> {
+    let phys = trans.to_phys(log)?;
+    Some(read_node(image, phys as usize))
+}
+
+fn read_node(image: &[u8], offset: usize) -> Box<BtrfsNode> {
+    let mut result = Box::new(BtrfsNode {
+        csum: FromBytes::get(image, offset),
+        fs_uid: FromBytes::get(image, offset + 0x20),
+        bytenr: FromBytes::get(image, offset + 0x30),
+        flags: FromBytes::get(image, offset + 0x38),
+        chunk_tree_uid: FromBytes::get(image, offset + 0x40),
+        generation: FromBytes::get(image, offset + 0x50),
+        owner: FromBytes::get(image, offset + 0x58),
+        nritems: FromBytes::get(image, offset + 0x60),
+        level: FromBytes::get(image, offset + 0x64),
+        items: Vec::new(),
+    });
+
+    // assuming leaf for now
+
+    for i in 0..result.nritems as usize {
+        let key_id: u64 = FromBytes::get(image, offset + 0x65 + i*0x19);
+        let key_type_code: u8 = FromBytes::get(image, offset + 0x65 + i*0x19 + 0x08);
+        let key_offset: u64 = FromBytes::get(image, offset + 0x65 + i*0x19 + 0x09);
+        let data_offset: u32 = FromBytes::get(image, offset + 0x65 + i*0x19 + 0x11);
+        let data_size: u32 = FromBytes::get(image, offset + 0x65 + i*0x19 + 0x15);
+
+        let key_type = itemtype_from_code(key_type_code);
+        let data_slice = &image[(offset + 0x65 + data_offset as usize) .. (offset + 0x65 + data_offset as usize + data_size as usize)];
+
+        let value = match key_type {
+            BtrfsItemType::BlockGroup => BtrfsValue::BlockGroup(FromBytes::get(data_slice, 0)),
+            BtrfsItemType::Metadata => BtrfsValue::Extent(FromBytes::get(data_slice, 0)),
+            BtrfsItemType::Chunk => BtrfsValue::Chunk(FromBytes::get(data_slice, 0)),
+            BtrfsItemType::Root => BtrfsValue::Root(FromBytes::get(data_slice, 0)),
+            _ => BtrfsValue::Unknown(Vec::from(data_slice)),
+        };
+
+        result.items.push(BtrfsItem {
+            key: BtrfsKey {
+                key_id: key_id,
+                key_type: key_type,
+                key_offset: key_offset,
+            },
+            value: value,
+        });
+    }
+
+    result
+}
+*/
@ -5,9 +5,3 @@ macro_rules! error {
 macro_rules! err {
     ($($i:expr),*) => { Err(error!($($i),*)) };
 }
-
-macro_rules! format_escape {
-    ($($arg:tt)*) => {
-        html! { (format!($($arg)*)) }
-    };
-}
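For context, `err!` is the shorthand the node parser above uses to bail out (`return err!("Not enough data to parse node header")`); it simply wraps whatever `error!` builds, presumably a `ParseError`, in `Err`. A hedged usage sketch under that assumption; `parse_u32_le` is a made-up helper:

fn parse_u32_le(bytes: &[u8]) -> Result<u32, ParseError> {
    if bytes.len() < 4 {
        // expands to `return Err(error!(...))`, matching the ParseError return type
        return err!("unexpected end of data");
    }
    Ok(u32::from_le_bytes([bytes[0], bytes[1], bytes[2], bytes[3]]))
}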
153
style.css
@ -1,153 +0,0 @@
body {
    padding: 0.2em 2em;
}

table {
    width: 100%;
}

table td {
    padding: 0.1em 0.2em;
}

table th {
    text-align: left;
    border-bottom: 1px solid #ccc;
}

table > tbody > tr.view {
    cursor: pointer;
}

table > tbody > tr.even {
    background: #eee;
}

table > tbody > tr.highlight {
    background: #0cc;
}

table > tbody > tr.fold {
    display: none;
}

table > tbody > tr.fold > td {
    padding-left: 1em;
}

table > tbody > tr.fold.open {
    display: table-row;
}

div.nav {
    padding: 5px;
    background-color: #dde;
    border-radius: 4px;
    margin: 5px 0;
    overflow: hidden;
    text-align: center;
}

a.nav {
    text-decoration: none;
}

details.item {
    padding: 3px;
    background-color: #dde;
    border-radius: 4px;
    margin: 3px 0;
    overflow: hidden;
}

a {
    color: black;
}

details.highlight {
    background-color: #bbc;
}

details .details {
    color: black;
    // background-color: #222;
    padding: 10px;
    margin-top: 5px;
    border-radius: 4px;
}

details .itemvalue {
    color: black;
    padding: 3px;
    margin: 1px 2px;
    width: auto;
    display: inline-block;
}

details .key {
    color: white;
    background-color: #999;
    border-radius: 4px;
    padding: 3px;
    margin: 1px 2px;
    display: inline-block;
    font-family: monospace;
    font-size: 12pt;
}

details .key a {
    color: white;
}

span.key_id {
    min-width: 160px;
    text-align: right;
}

span.key_type {
    min-width: 160px;
}

span.key_offset {
    min-width: 160px;
    text-align: right;
}

span.key_type.inode {
    background-color: #c22;
}

span.key_type.ref {
    background-color: #aa5;
}

span.key_type.extent {
    background-color: #151;
}

span.key_type.dir {
    background-color: #33c;
}

span.key_type.root {
    background-color: #111;
}

.details table {
    border-collapse: collapse;
    margin-bottom: 10px;
}

.details td {
    border: 1px solid black;
}

.details td:first-child {
    border: 1px solid black;
    width: 160px;
}

.details p {
    padding: 0;
    margin: 5px 0;
}