Bench: report (#49)

This commit is contained in:
Zixuan Chen 2022-12-27 14:18:46 +08:00 committed by GitHub
parent 7b86332ee3
commit 9748779f08
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
14 changed files with 339 additions and 263 deletions

View file

@ -2,7 +2,9 @@
"cSpell.words": [
"arbtest",
"clippy",
"dhat",
"flate",
"gmax",
"heapless",
"Leeeon",
"LOGSTORE",

12
Cargo.lock generated
View file

@ -106,6 +106,17 @@ dependencies = [
"rustc-demangle",
]
[[package]]
name = "bench-utils"
version = "0.1.0"
dependencies = [
"arbitrary",
"enum-as-inner",
"flate2",
"rand",
"serde_json",
]
[[package]]
name = "bit-set"
version = "0.5.3"
@ -676,6 +687,7 @@ dependencies = [
"arbitrary",
"arbtest",
"arref",
"bench-utils",
"color-backtrace",
"crdt-list",
"criterion",

View file

@ -0,0 +1,13 @@
[package]
name = "bench-utils"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
arbitrary = { version = "1.2.0", features = ["derive"] }
enum-as-inner = "0.5.1"
flate2 = "1.0.25"
rand = "0.8.5"
serde_json = "1.0.89"

View file

@ -0,0 +1,88 @@
use arbitrary::Arbitrary;
use enum_as_inner::EnumAsInner;
use rand::{rngs::StdRng, RngCore, SeedableRng};
use std::io::Read;
use flate2::read::GzDecoder;
use serde_json::Value;
/// A single text edit extracted from an editing trace.
///
/// Consumers apply it as "delete `del` characters at `pos`, then insert
/// `ins` at `pos`" (see the benchmark call sites that pair
/// `text.delete(.., pos, del)` with `text.insert(.., pos, ins)`).
#[derive(Arbitrary)]
pub struct TextAction {
    /// Character position in the document where the edit applies.
    pub pos: usize,
    /// Content inserted at `pos` (may be empty).
    pub ins: String,
    /// Number of characters deleted at `pos` before inserting.
    pub del: usize,
}
/// Loads the automerge-paper editing trace and returns it as a flat list of
/// [`TextAction`]s.
///
/// The trace ships as a gzipped JSON blob embedded in the binary; it is
/// decompressed, parsed, and flattened (every patch of every transaction
/// becomes one action). Panics if the embedded data is malformed.
pub fn get_automerge_actions() -> Vec<TextAction> {
    const RAW_DATA: &[u8; 901823] =
        include_bytes!("../../loro-core/benches/automerge-paper.json.gz");

    // Decompress the embedded gzip stream into a JSON string.
    let mut decoder = GzDecoder::new(&RAW_DATA[..]);
    let mut json_text = String::new();
    decoder.read_to_string(&mut json_text).unwrap();

    let json: Value = serde_json::from_str(&json_text).unwrap();
    let txns = json.as_object().unwrap().get("txns").unwrap();

    let mut actions = Vec::new();
    for txn in txns.as_array().unwrap() {
        let patches = txn.as_object().unwrap().get("patches").unwrap();
        for patch in patches.as_array().unwrap() {
            // Each patch is a [pos, delete_count, insert_string] triple.
            actions.push(TextAction {
                pos: patch[0].as_u64().unwrap() as usize,
                del: patch[1].as_u64().unwrap() as usize,
                ins: patch[2].as_str().unwrap().to_string(),
            });
        }
    }
    actions
}
/// One step in a simulated multi-client editing session.
#[derive(EnumAsInner, Arbitrary)]
pub enum Action {
    /// A text edit performed by the client at index `client`.
    Text { client: usize, action: TextAction },
    /// Synchronize all clients with each other.
    SyncAll,
}
/// Generates a deterministic pseudo-random trace of realtime-collaboration
/// actions.
///
/// Produces up to `action_num` actions spread over `client_num` clients,
/// derived entirely from `seed` (same seed → same trace). A `SyncAll` is
/// injected whenever more than 100 actions have passed since the last sync,
/// so the trace periodically converges all clients.
pub fn gen_realtime_actions(action_num: usize, client_num: usize, seed: u64) -> Vec<Action> {
    let mut gen = StdRng::seed_from_u64(seed);
    // Size a byte pool large enough for `action_num` arbitrary `Action`s,
    // using the upper size-hint bound when one exists.
    let size = Action::size_hint(1);
    let size = size.1.unwrap_or(size.0);
    let mut dest = vec![0; action_num * size];
    gen.fill_bytes(&mut dest);
    let mut arb = arbitrary::Unstructured::new(&dest);
    let mut ans = Vec::new();
    let mut last_sync_all = 0;
    for i in 0..action_num {
        // Injected `SyncAll`s can push the count past `action_num`; stop early.
        if ans.len() >= action_num {
            break;
        }
        let mut action = arb.arbitrary().unwrap();
        match &mut action {
            Action::Text { client, action } => {
                // Clamp the arbitrary client index into the valid range.
                *client %= client_num;
                // Keep inserts tiny and printable: replace the arbitrary
                // string with the decimal rendering of its first byte.
                // (`as_bytes()[0]` is already `u8` — no cast needed.)
                if !action.ins.is_empty() {
                    action.ins = action.ins.as_bytes()[0].to_string();
                }
            }
            Action::SyncAll => {
                last_sync_all = i;
            }
        }
        ans.push(action);
        // Force a sync if none occurred within the last 100 actions.
        if i - last_sync_all > 100 {
            ans.push(Action::SyncAll);
            last_sync_all = i;
        }
    }
    ans
}

View file

@ -46,11 +46,13 @@ ctor = "0.1.23"
criterion = "0.4.0"
flate2 = "1.0.24"
arbtest = "0.2.0"
bench-utils = { path = "../bench-utils" }
# See https://matklad.github.io/2021/02/27/delete-cargo-integration-tests.html
[lib]
doctest = false
bench = false
[features]
wasm = ["wasm-bindgen", "js-sys", "serde-wasm-bindgen"]

View file

@ -1,11 +1,10 @@
use criterion::{criterion_group, criterion_main, Criterion};
const RAW_DATA: &[u8; 901823] = include_bytes!("automerge-paper.json.gz");
#[cfg(feature = "test_utils")]
mod sync {
use std::io::Read;
use super::*;
use bench_utils::{get_automerge_actions, TextAction};
use flate2::read::GzDecoder;
use loro_core::container::registry::ContainerWrapper;
use loro_core::log_store::{EncodeConfig, EncodeMode};
@ -13,31 +12,7 @@ mod sync {
use serde_json::Value;
pub fn b4(c: &mut Criterion) {
let mut d = GzDecoder::new(&RAW_DATA[..]);
let mut s = String::new();
d.read_to_string(&mut s).unwrap();
let json: Value = serde_json::from_str(&s).unwrap();
let txns = json.as_object().unwrap().get("txns");
let mut actions = Vec::new();
for (i, txn) in txns.unwrap().as_array().unwrap().iter().enumerate() {
if i > 1000 {
break;
}
let patches = txn
.as_object()
.unwrap()
.get("patches")
.unwrap()
.as_array()
.unwrap();
for patch in patches {
let pos = patch[0].as_u64().unwrap() as usize;
let del_here = patch[1].as_u64().unwrap() as usize;
let ins_content = patch[2].as_str().unwrap();
actions.push((pos, del_here, ins_content));
}
}
let actions = get_automerge_actions();
let mut b = c.benchmark_group("encode_with_sync");
b.sample_size(10);
b.bench_function("update", |b| {
@ -47,23 +22,23 @@ mod sync {
let t2 = c2.get_text("text");
b.iter(|| {
for (i, action) in actions.iter().enumerate() {
let (pos, del, insert) = action;
let TextAction { pos, ins, del } = action;
if i % 2 == 0 {
t1.with_container(|text| {
text.delete(&c1, *pos, *del);
text.insert(&c1, *pos, insert);
text.insert(&c1, *pos, ins);
});
let update = c1
.encode(EncodeConfig::new(EncodeMode::Updates(c2.vv()), None))
.encode(EncodeConfig::new(EncodeMode::Updates(c2.vv_cloned()), None))
.unwrap();
c2.decode(&update).unwrap();
} else {
t2.with_container(|text| {
text.delete(&c2, *pos, *del);
text.insert(&c2, *pos, insert);
text.insert(&c2, *pos, ins);
});
let update = c2
.encode(EncodeConfig::new(EncodeMode::Updates(c1.vv()), None))
.encode(EncodeConfig::new(EncodeMode::Updates(c1.vv_cloned()), None))
.unwrap();
c1.decode(&update).unwrap();
}
@ -77,23 +52,29 @@ mod sync {
let t2 = c2.get_text("text");
b.iter(|| {
for (i, action) in actions.iter().enumerate() {
let (pos, del, insert) = action;
let TextAction { pos, ins, del } = action;
if i % 2 == 0 {
t1.with_container(|text| {
text.delete(&c1, *pos, *del);
text.insert(&c1, *pos, insert);
text.insert(&c1, *pos, ins);
});
let update = c1
.encode(EncodeConfig::new(EncodeMode::RleUpdates(c2.vv()), None))
.encode(EncodeConfig::new(
EncodeMode::RleUpdates(c2.vv_cloned()),
None,
))
.unwrap();
c2.decode(&update).unwrap();
} else {
t2.with_container(|text| {
text.delete(&c2, *pos, *del);
text.insert(&c2, *pos, insert);
text.insert(&c2, *pos, ins);
});
let update = c2
.encode(EncodeConfig::new(EncodeMode::RleUpdates(c1.vv()), None))
.encode(EncodeConfig::new(
EncodeMode::RleUpdates(c1.vv_cloned()),
None,
))
.unwrap();
c1.decode(&update).unwrap();
}
@ -104,41 +85,23 @@ mod sync {
}
#[cfg(feature = "test_utils")]
mod run {
use std::io::Read;
use super::*;
use flate2::read::GzDecoder;
use bench_utils::TextAction;
use loro_core::container::registry::ContainerWrapper;
use loro_core::log_store::{EncodeConfig, EncodeMode};
use loro_core::{LoroCore, VersionVector};
use serde_json::Value;
pub fn b4(c: &mut Criterion) {
let mut d = GzDecoder::new(&RAW_DATA[..]);
let mut s = String::new();
d.read_to_string(&mut s).unwrap();
let json: Value = serde_json::from_str(&s).unwrap();
let txns = json.as_object().unwrap().get("txns");
let actions = bench_utils::get_automerge_actions();
let mut loro = LoroCore::default();
let text = loro.get_text("text");
text.with_container(|text| {
for txn in txns.unwrap().as_array().unwrap() {
let patches = txn
.as_object()
.unwrap()
.get("patches")
.unwrap()
.as_array()
.unwrap();
for patch in patches {
let pos = patch[0].as_u64().unwrap() as usize;
let del_here = patch[1].as_u64().unwrap() as usize;
let ins_content = patch[2].as_str().unwrap();
text.delete(&loro, pos, del_here);
text.insert(&loro, pos, ins_content);
}
for TextAction { pos, ins, del } in actions.iter() {
text.delete(&loro, *pos, *del);
text.insert(&loro, *pos, ins);
}
});
let mut b = c.benchmark_group("encode");
b.bench_function("B4_encode_updates", |b| {
b.iter(|| {

View file

@ -1,20 +1,17 @@
use criterion::{criterion_group, criterion_main, Criterion};
const RAW_DATA: &[u8; 901823] = include_bytes!("automerge-paper.json.gz");
#[cfg(feature = "test_utils")]
mod run {
use std::io::Read;
use super::*;
use arbitrary::Unstructured;
use flate2::read::GzDecoder;
use bench_utils::TextAction;
use loro_core::container::registry::ContainerWrapper;
use loro_core::fuzz::test_multi_sites;
use loro_core::fuzz::Action;
use loro_core::LoroCore;
use rand::Rng;
use rand::SeedableRng;
use serde_json::Value;
pub fn two_client_edits(c: &mut Criterion) {
let mut rgn = rand::rngs::StdRng::seed_from_u64(0);
@ -41,44 +38,14 @@ mod run {
}
pub fn b4(c: &mut Criterion) {
struct Action {
pos: usize,
ins: String,
del: usize,
}
let mut actions = Vec::new();
let mut d = GzDecoder::new(&RAW_DATA[..]);
let mut s = String::new();
d.read_to_string(&mut s).unwrap();
let json: Value = serde_json::from_str(&s).unwrap();
let txns = json.as_object().unwrap().get("txns");
for txn in txns.unwrap().as_array().unwrap() {
let patches = txn
.as_object()
.unwrap()
.get("patches")
.unwrap()
.as_array()
.unwrap();
for patch in patches {
let pos = patch[0].as_u64().unwrap() as usize;
let del_here = patch[1].as_u64().unwrap() as usize;
let ins_content = patch[2].as_str().unwrap();
actions.push(Action {
pos,
ins: ins_content.to_string(),
del: del_here,
});
}
}
// println!("{}", txns.unwrap().as_array().unwrap().len());
let actions = bench_utils::get_automerge_actions();
let mut b = c.benchmark_group("direct_apply");
b.bench_function("B4", |b| {
b.iter(|| {
let mut loro = LoroCore::default();
let text = loro.get_text("text");
text.with_container(|text| {
for Action { pos, ins, del } in actions.iter() {
for TextAction { pos, ins, del } in actions.iter() {
text.delete(&loro, *pos, *del);
text.insert(&loro, *pos, ins);
}
@ -92,7 +59,7 @@ mod run {
loro.subscribe_deep(Box::new(|_| {}));
let text = loro.get_text("text");
text.with_container(|text| {
for Action { pos, ins, del } in actions.iter() {
for TextAction { pos, ins, del } in actions.iter() {
text.delete(&loro, *pos, *del);
text.insert(&loro, *pos, ins);
}
@ -105,7 +72,7 @@ mod run {
b.iter(|| {
let mut loro = LoroCore::default();
let mut loro_b = LoroCore::default();
for Action { pos, ins, del } in actions.iter() {
for TextAction { pos, ins, del } in actions.iter() {
let text = loro.get_text("text");
text.with_container(|text| {
text.delete(&loro, *pos, *del);
@ -124,36 +91,20 @@ mod run {
let mut loro = LoroCore::default();
let mut loro_b = LoroCore::default();
let mut i = 0;
for txn in txns.unwrap().as_array().unwrap() {
for TextAction { pos, ins, del } in actions.iter() {
let pos = *pos;
let del = *del;
i += 1;
if i > 1000 {
break;
}
let mut text = loro.get_text("text");
let patches = txn
.as_object()
.unwrap()
.get("patches")
.unwrap()
.as_array()
.unwrap();
for patch in patches {
let pos = patch[0].as_u64().unwrap() as usize;
let del_here = patch[1].as_u64().unwrap() as usize;
let ins_content = patch[2].as_str().unwrap();
text.delete(&loro, pos, del_here).unwrap();
text.insert(&loro, pos, ins_content).unwrap();
}
text.delete(&loro, pos, del).unwrap();
text.insert(&loro, pos, ins).unwrap();
let mut text = loro_b.get_text("text");
for patch in patches {
let pos = patch[0].as_u64().unwrap() as usize;
let del_here = patch[1].as_u64().unwrap() as usize;
let ins_content = patch[2].as_str().unwrap();
text.delete(&loro_b, pos, del_here).unwrap();
text.insert(&loro_b, pos, ins_content).unwrap();
}
text.delete(&loro_b, pos, del).unwrap();
text.insert(&loro_b, pos, ins).unwrap();
loro_b.import(loro.export(loro_b.vv_cloned()));
loro.import(loro_b.export(loro.vv_cloned()));
}

View file

@ -4,7 +4,7 @@
"deny": "cargo deny check",
"fuzz": "cargo fuzz run",
"quick-fuzz": "deno run -A ./scripts/fuzz.ts text recursive encoding",
"mem": "cargo run --example mem -r --features=test_utils",
"mem": "deno run -A ./scripts/run_mem.ts",
"flame": "cargo flamegraph --example many_actors --root",
"bench": "cargo bench --features test_utils"
}

View file

@ -1,41 +1,16 @@
#[cfg(not(feature = "test_utils"))]
fn main() {}
#[cfg(feature = "test_utils")]
fn main() {
const RAW_DATA: &[u8; 901823] = include_bytes!("../benches/automerge-paper.json.gz");
use std::{io::Read, time::Instant};
use flate2::read::GzDecoder;
use bench_utils::TextAction;
use loro_core::LoroCore;
use serde_json::Value;
let mut d = GzDecoder::new(&RAW_DATA[..]);
let mut s = String::new();
d.read_to_string(&mut s).unwrap();
let json: Value = serde_json::from_str(&s).unwrap();
let txns = json.as_object().unwrap().get("txns");
println!("Txn: {}", txns.unwrap().as_array().unwrap().len());
use std::time::Instant;
let actions = bench_utils::get_automerge_actions();
let mut loro = LoroCore::default();
let start = Instant::now();
for _ in 0..100 {
for (_, txn) in txns.unwrap().as_array().unwrap().iter().enumerate() {
let mut text = loro.get_text("text");
let patches = txn
.as_object()
.unwrap()
.get("patches")
.unwrap()
.as_array()
.unwrap();
for patch in patches {
let pos = patch[0].as_u64().unwrap() as usize;
let del_here = patch[1].as_u64().unwrap() as usize;
let ins_content = patch[2].as_str().unwrap();
text.delete(&loro, pos, del_here).unwrap();
text.insert(&loro, pos, ins_content).unwrap();
}
let mut text = loro.get_text("text");
for TextAction { del, ins, pos } in actions.iter() {
text.delete(&loro, *pos, *del).unwrap();
text.insert(&loro, *pos, ins).unwrap();
}
}
println!("{}", start.elapsed().as_millis());

View file

@ -1,43 +1,25 @@
use std::{
io::{Read, Write},
time::Instant,
};
use std::{io::Write, time::Instant};
use flate2::{read::GzDecoder, write::GzEncoder};
use bench_utils::TextAction;
use flate2::write::GzEncoder;
use loro_core::VersionVector;
use loro_core::{
container::registry::ContainerWrapper,
log_store::{EncodeConfig, EncodeMode},
LoroCore, VersionVector,
LoroCore,
};
use serde_json::Value;
const RAW_DATA: &[u8; 901823] = include_bytes!("../benches/automerge-paper.json.gz");
fn main() {
let mut d = GzDecoder::new(&RAW_DATA[..]);
let mut s = String::new();
d.read_to_string(&mut s).unwrap();
let json: Value = serde_json::from_str(&s).unwrap();
let txns = json.as_object().unwrap().get("txns");
let actions = bench_utils::get_automerge_actions();
let mut loro = LoroCore::default();
let text = loro.get_text("text");
text.with_container(|text| {
for txn in txns.unwrap().as_array().unwrap() {
let patches = txn
.as_object()
.unwrap()
.get("patches")
.unwrap()
.as_array()
.unwrap();
for patch in patches {
let pos = patch[0].as_u64().unwrap() as usize;
let del_here = patch[1].as_u64().unwrap() as usize;
let ins_content = patch[2].as_str().unwrap();
text.delete(&loro, pos, del_here);
text.insert(&loro, pos, ins_content);
}
for TextAction { pos, ins, del } in actions.iter() {
text.delete(&loro, *pos, *del);
text.insert(&loro, *pos, ins);
}
});
let start = Instant::now();
let buf = loro
.encode(EncodeConfig::new(

View file

@ -1,60 +1,117 @@
// use tikv_jemallocator::Jemalloc;
// #[global_allocator]
// static GLOBAL: Jemalloc = Jemalloc;
#[global_allocator]
static ALLOC: dhat::Alloc = dhat::Alloc;
const RAW_DATA: &[u8; 901823] = include_bytes!("../benches/automerge-paper.json.gz");
use std::time::Instant;
use std::{io::Read, time::Instant};
use bench_utils::TextAction;
use loro_core::{log_store::EncodeConfig, LoroCore};
use flate2::read::GzDecoder;
use loro_core::LoroCore;
use serde_json::Value;
pub fn main() {
// let alloc_stats = stats::allocated::mib().unwrap();
let mut d = GzDecoder::new(&RAW_DATA[..]);
let mut s = String::new();
d.read_to_string(&mut s).unwrap();
let json: Value = serde_json::from_str(&s).unwrap();
drop(s);
let txns = json.as_object().unwrap().get("txns");
fn apply_automerge(times: usize) {
let actions = bench_utils::get_automerge_actions();
let start = Instant::now();
let profiler = dhat::Profiler::builder().trim_backtraces(None).build();
let mut loro = LoroCore::default();
let mut text = loro.get_text("text");
for _i in 0..1 {
for txn in txns.unwrap().as_array().unwrap() {
let patches = txn
.as_object()
.unwrap()
.get("patches")
.unwrap()
.as_array()
.unwrap();
for patch in patches {
let pos = patch[0].as_u64().unwrap() as usize;
let del_here = patch[1].as_u64().unwrap() as usize;
let ins_content = patch[2].as_str().unwrap();
text.delete(&loro, pos, del_here).unwrap();
text.insert(&loro, pos, ins_content).unwrap();
}
println!("Apply Automerge Dataset 1X");
for _i in 0..times {
for TextAction { pos, ins, del } in actions.iter() {
text.delete(&loro, *pos, *del).unwrap();
text.insert(&loro, *pos, ins).unwrap();
}
}
drop(profiler);
println!("Used: {} ms", start.elapsed().as_millis());
}
if start.elapsed().as_secs() > 10 {
break;
fn concurrent_actors(actor_num: usize) {
let mut actors: Vec<LoroCore> = Vec::new();
for _ in 0..actor_num {
actors.push(LoroCore::default());
}
let mut updates = Vec::new();
for actor in actors.iter_mut() {
let mut list = actor.get_list("list");
list.insert(actor, 0, 1).unwrap();
updates.push(actor.encode(EncodeConfig::from_vv(None)).unwrap());
}
let mut a = actors.drain(0..1).next().unwrap();
drop(actors);
let profiler = dhat::Profiler::builder().trim_backtraces(None).build();
for update in updates {
a.decode(&update).unwrap();
}
drop(profiler);
}
fn realtime_sync(actor_num: usize, action_num: usize) {
let actions = bench_utils::gen_realtime_actions(action_num, actor_num, 100);
let profiler = dhat::Profiler::builder().trim_backtraces(None).build();
let mut actors = Vec::new();
for _ in 0..actor_num {
actors.push(LoroCore::default());
}
for action in actions {
match action {
bench_utils::Action::Text { client, action } => {
let mut text = actors[client].get_text("text");
let bench_utils::TextAction { pos, ins, del } = action;
let pos = pos % (text.len() + 1);
let del = del.min(text.len() - pos);
text.delete(&actors[client], pos, del).unwrap();
text.insert(&actors[client], pos, &ins).unwrap();
}
bench_utils::Action::SyncAll => {
let mut updates = Vec::new();
for i in 1..actor_num {
let (a, b) = arref::array_mut_ref!(&mut actors, [0, i]);
updates.push(
b.encode(EncodeConfig::from_vv(Some(a.vv_cloned())))
.unwrap(),
);
}
for update in updates {
// TODO: use import batch here
actors[0].decode(&update).unwrap();
}
for i in 1..actor_num {
let (a, b) = arref::array_mut_ref!(&mut actors, [0, i]);
b.decode(
&a.encode(EncodeConfig::from_vv(Some(b.vv_cloned())))
.unwrap(),
)
.unwrap();
}
}
}
}
drop(json);
drop(d);
#[cfg(feature = "test_utils")]
loro.debug_inspect();
drop(profiler);
// e.advance().unwrap();
// let new_new_heap = alloc_stats.read().unwrap();
println!("Apply Automerge Dataset 1X");
// println!("Mem: {} MB", new_new_heap as f64 / 1024. / 1024.);
println!("Used: {} ms", start.elapsed().as_millis());
}
pub fn main() {
let args: Vec<_> = std::env::args().collect();
if args.len() < 2 {
apply_automerge(1);
return;
}
match args[1].as_str() {
"automerge" => {
apply_automerge(1);
}
"100_concurrent" => {
concurrent_actors(100);
}
"200_concurrent" => {
concurrent_actors(200);
}
"10_actor_sync_1000_actions" => realtime_sync(10, 1000),
"20_actor_sync_1000_actions" => realtime_sync(20, 1000),
"10_actor_sync_2000_actions" => realtime_sync(10, 2000),
_ => {
panic!("Unknown command `{}`", args.join(" "));
}
}
}

View file

@ -3,51 +3,24 @@ fn main() {}
#[cfg(feature = "test_utils")]
fn main() {
const RAW_DATA: &[u8; 901823] = include_bytes!("../benches/automerge-paper.json.gz");
use std::{io::Read, time::Instant};
use std::time::Instant;
use flate2::read::GzDecoder;
use bench_utils::{get_automerge_actions, TextAction};
use loro_core::LoroCore;
use serde_json::Value;
let mut d = GzDecoder::new(&RAW_DATA[..]);
let mut s = String::new();
d.read_to_string(&mut s).unwrap();
let json: Value = serde_json::from_str(&s).unwrap();
let txns = json.as_object().unwrap().get("txns");
println!("Txn: {}", txns.unwrap().as_array().unwrap().len());
let actions = get_automerge_actions();
let mut loro = LoroCore::default();
let mut loro_b = LoroCore::default();
let mut loro_c = LoroCore::default();
let start = Instant::now();
for (i, txn) in txns.unwrap().as_array().unwrap().iter().enumerate() {
for (i, TextAction { pos, ins, del }) in actions.iter().enumerate() {
let mut text = loro.get_text("text");
let patches = txn
.as_object()
.unwrap()
.get("patches")
.unwrap()
.as_array()
.unwrap();
for patch in patches {
let pos = patch[0].as_u64().unwrap() as usize;
let del_here = patch[1].as_u64().unwrap() as usize;
let ins_content = patch[2].as_str().unwrap();
text.delete(&loro, pos, del_here).unwrap();
text.insert(&loro, pos, ins_content).unwrap();
}
text.delete(&loro, *pos, *del).unwrap();
text.insert(&loro, *pos, ins).unwrap();
drop(text);
let mut text = loro_b.get_text("text");
for patch in patches {
let pos = patch[0].as_u64().unwrap() as usize;
let del_here = patch[1].as_u64().unwrap() as usize;
let ins_content = patch[2].as_str().unwrap();
text.delete(&loro_b, pos, del_here).unwrap();
text.insert(&loro_b, pos, ins_content).unwrap();
}
drop(text);
text.delete(&loro_b, *pos, *del).unwrap();
text.insert(&loro_b, *pos, ins).unwrap();
if i % 10 == 0 {
loro.import(loro_b.export(loro.vv_cloned()));
loro_b.import(loro.export(loro_b.vv_cloned()));

View file

@ -0,0 +1,49 @@
import __ from "https://deno.land/x/dirname@1.1.2/mod.ts";
const { __dirname } = __(import.meta);
import { resolve } from "https://deno.land/std@0.105.0/path/mod.ts";
/** Memory-benchmark scenarios accepted by `cargo run --example mem -r -- <task>`. */
export const Tasks = [
  "100_concurrent",
  "200_concurrent",
  "automerge",
  "10_actor_sync_1000_actions",
  "20_actor_sync_1000_actions",
  "10_actor_sync_2000_actions",
];
/** Heap figures parsed from dhat's stderr report for one task. */
export interface Result {
  task: string;
  // Bytes reported at "dhat: At t-gmax" — the peak heap size.
  maxBytes: number;
  // Bytes reported at "dhat: At t-end" — heap still live at exit.
  endBytes: number;
}
/**
 * Runs one memory-benchmark task via `cargo run --example mem -r -- ${task}`
 * and parses dhat's heap report from the process's stderr.
 *
 * Throws (after logging the raw output) if the dhat lines cannot be found.
 */
export async function run(task: string): Promise<Result> {
  const cmd = `cargo run --example mem -r -- ${task}`;
  const process = Deno.run({
    cmd: cmd.split(" "),
    cwd: resolve(__dirname, ".."),
    stdout: "piped",
    stderr: "piped",
  });
  // Drain BOTH pipes concurrently: leaving the piped stdout unread can
  // deadlock the child once the OS pipe buffer fills. `output()` and
  // `stderrOutput()` each buffer to EOF and close their stream.
  const [, stderrBytes] = await Promise.all([
    process.output(),
    process.stderrOutput(),
  ]);
  await process.status();
  process.close();
  const output = new TextDecoder().decode(stderrBytes);
  try {
    // extract "2,555,555" from `dhat: At t-gmax: 2,555,555 bytes`
    const maxBytes = parseInt(
      output.match(/dhat: At t-gmax:\s+((\d+,?)+) bytes/)![1].replace(/,/g, ""),
    );
    const endBytes = parseInt(
      output.match(/dhat: At t-end:\s+((\d+,?)+) bytes/)![1].replace(/,/g, ""),
    );
    return {
      task,
      maxBytes,
      endBytes,
    };
  } catch (e) {
    // Surface the raw output so a failed parse is diagnosable.
    console.error(e);
    console.log(output);
    throw e;
  }
}

View file

@ -0,0 +1,9 @@
import { run, Tasks } from "./mem.ts";

// Run every memory-benchmark task one at a time and print all parsed results.
const results = [];
for (const task of Tasks) {
  results.push(await run(task));
}
console.log(results);