Skip to content

Commit df95812

Browse files
committed
implement the one-shot serialize with backrefs in terms of the incremental serializer
1 parent 110d69c commit df95812

File tree

2 files changed

+6
-90
lines changed

2 files changed

+6
-90
lines changed

src/serde/mod.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ pub use de_tree::{parse_triples, ParsedTriple};
2222
pub use incremental::{Serializer, UndoState};
2323
pub use object_cache::{serialized_length, treehash, ObjectCache};
2424
pub use ser::{node_to_bytes, node_to_bytes_limit};
25-
pub use ser_br::{node_to_bytes_backrefs, node_to_bytes_backrefs_limit};
25+
pub use ser_br::node_to_bytes_backrefs;
2626
pub use tools::{
2727
serialized_length_from_bytes, serialized_length_from_bytes_trusted, tree_hash_from_stream,
2828
};

src/serde/ser_br.rs

Lines changed: 5 additions & 89 deletions
Original file line numberDiff line numberDiff line change
@@ -1,93 +1,14 @@
11
// Serialization with "back-references"
22

33
use std::io;
4-
use std::io::Cursor;
54

6-
use super::object_cache::{serialized_length, treehash, ObjectCache};
7-
use super::read_cache_lookup::ReadCacheLookup;
8-
use super::write_atom::write_atom;
9-
use crate::allocator::{Allocator, NodePtr, SExp};
10-
use crate::serde::ser::LimitedWriter;
11-
12-
const BACK_REFERENCE: u8 = 0xfe;
13-
const CONS_BOX_MARKER: u8 = 0xff;
14-
15-
#[derive(PartialEq, Eq)]
16-
enum ReadOp {
17-
Parse,
18-
Cons,
19-
}
20-
21-
pub fn node_to_stream_backrefs<W: io::Write>(
22-
allocator: &Allocator,
23-
node: NodePtr,
24-
f: &mut W,
25-
) -> io::Result<()> {
26-
let mut read_op_stack: Vec<ReadOp> = vec![ReadOp::Parse];
27-
let mut write_stack: Vec<NodePtr> = vec![node];
28-
29-
let mut read_cache_lookup = ReadCacheLookup::new();
30-
31-
let mut thc = ObjectCache::new(treehash);
32-
let mut slc = ObjectCache::new(serialized_length);
33-
34-
while let Some(node_to_write) = write_stack.pop() {
35-
let op = read_op_stack.pop();
36-
assert!(op == Some(ReadOp::Parse));
37-
38-
let node_serialized_length = *slc
39-
.get_or_calculate(allocator, &node_to_write, None)
40-
.expect("couldn't calculate serialized length");
41-
let node_tree_hash = thc
42-
.get_or_calculate(allocator, &node_to_write, None)
43-
.expect("can't get treehash");
44-
match read_cache_lookup.find_path(node_tree_hash, node_serialized_length) {
45-
Some(path) => {
46-
f.write_all(&[BACK_REFERENCE])?;
47-
write_atom(f, &path)?;
48-
read_cache_lookup.push(*node_tree_hash);
49-
}
50-
None => match allocator.sexp(node_to_write) {
51-
SExp::Pair(left, right) => {
52-
f.write_all(&[CONS_BOX_MARKER])?;
53-
write_stack.push(right);
54-
write_stack.push(left);
55-
read_op_stack.push(ReadOp::Cons);
56-
read_op_stack.push(ReadOp::Parse);
57-
read_op_stack.push(ReadOp::Parse);
58-
}
59-
SExp::Atom => {
60-
let atom = allocator.atom(node_to_write);
61-
write_atom(f, atom.as_ref())?;
62-
read_cache_lookup.push(*node_tree_hash);
63-
}
64-
},
65-
}
66-
while !read_op_stack.is_empty() && read_op_stack[read_op_stack.len() - 1] == ReadOp::Cons {
67-
read_op_stack.pop();
68-
read_cache_lookup.pop2_and_cons();
69-
}
70-
}
71-
Ok(())
72-
}
73-
74-
pub fn node_to_bytes_backrefs_limit(
75-
a: &Allocator,
76-
node: NodePtr,
77-
limit: usize,
78-
) -> io::Result<Vec<u8>> {
79-
let buffer = Cursor::new(Vec::new());
80-
let mut writer = LimitedWriter::new(buffer, limit);
81-
node_to_stream_backrefs(a, node, &mut writer)?;
82-
let vec = writer.into_inner().into_inner();
83-
Ok(vec)
84-
}
5+
use crate::allocator::{Allocator, NodePtr};
6+
use crate::serde::incremental::Serializer;
857

868
pub fn node_to_bytes_backrefs(a: &Allocator, node: NodePtr) -> io::Result<Vec<u8>> {
87-
let mut buffer = Cursor::new(Vec::new());
88-
node_to_stream_backrefs(a, node, &mut buffer)?;
89-
let vec = buffer.into_inner();
90-
Ok(vec)
9+
let mut ser = Serializer::new();
10+
ser.add(a, node, None)?;
11+
Ok(ser.into_inner())
9112
}
9213

9314
#[cfg(test)]
@@ -107,10 +28,5 @@ mod tests {
10728
let expected = &[255, 255, 255, 133, 1, 2, 3, 4, 5, 254, 2, 254, 2, 254, 2];
10829

10930
assert_eq!(node_to_bytes_backrefs(&a, l3).unwrap(), expected);
110-
assert_eq!(node_to_bytes_backrefs_limit(&a, l3, 15).unwrap(), expected);
111-
assert_eq!(
112-
node_to_bytes_backrefs_limit(&a, l3, 14).unwrap_err().kind(),
113-
io::ErrorKind::OutOfMemory
114-
);
11531
}
11632
}

0 commit comments

Comments (0)