Feat: checkout to target version & use unicode index by default (#98)

* feat: checkout to frontiers

* feat: record timestamp

* fix: use unicode len by default for text
now "你好" has length of 2 instead of 6

* chore: rm dbg!
This commit is contained in:
Zixuan Chen 2023-08-04 10:45:23 +08:00 committed by GitHub
parent 1e736df133
commit c105ff2220
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
31 changed files with 533 additions and 226 deletions

View file

@ -21,7 +21,7 @@
],
"rust-analyzer.runnableEnv": {
"RUST_BACKTRACE": "full",
"DEBUG": "*"
// "DEBUG": "*"
},
"rust-analyzer.cargo.features": ["test_utils"],
"editor.defaultFormatter": "rust-lang.rust-analyzer",

View file

@ -285,7 +285,6 @@ mod tests {
let mut bytes = CompactBytes::new();
bytes.append(b"1234kk 123456 1234xyz");
let ans = bytes.alloc_advance(b"012345678");
dbg!(&ans);
assert_eq!(ans.len(), 3);
assert_eq!(ans[0].len(), 1);
assert_eq!(ans[1].len(), 6);

View file

@ -29,14 +29,14 @@ mod sync {
t1.insert(&mut txn, *pos, ins).unwrap();
txn.commit().unwrap();
let update = c1.export_from(&c2.vv_cloned());
let update = c1.export_from(&c2.oplog_vv());
c2.import(&update).unwrap();
} else {
let mut txn = c2.txn().unwrap();
t2.delete(&mut txn, *pos, *del).unwrap();
t2.insert(&mut txn, *pos, ins).unwrap();
txn.commit().unwrap();
let update = c2.export_from(&c1.vv_cloned());
let update = c2.export_from(&c1.oplog_vv());
c1.import(&update).unwrap();
}
}
@ -122,7 +122,7 @@ mod import {
text2.insert(&mut c2.txn().unwrap(), 0, "2").unwrap();
}
c1.import(&c2.export_from(&c1.vv_cloned())).unwrap();
c1.import(&c2.export_from(&c1.oplog_vv())).unwrap();
})
});
}

View file

@ -41,7 +41,7 @@ mod run {
let mut txn = actor.txn().unwrap();
text.insert(
&mut txn,
(action.pos as usize) % text.len().max(1),
(action.pos as usize) % text.len_unicode().max(1),
action.value.to_string().as_str(),
)
.unwrap();
@ -60,17 +60,17 @@ mod run {
let b = (action.sync as usize) % len;
if a != b {
let (a, b) = arref::array_mut_ref!(&mut actors, [a, b]);
a.import(&b.export_from(&a.vv_cloned())).unwrap();
a.import(&b.export_from(&a.oplog_vv())).unwrap();
}
}
for i in 1..actors.len() {
let (a, b) = arref::array_mut_ref!(&mut actors, [0, i]);
a.import(&b.export_from(&a.vv_cloned())).unwrap();
a.import(&b.export_from(&a.oplog_vv())).unwrap();
}
for i in 1..actors.len() {
let (a, b) = arref::array_mut_ref!(&mut actors, [i, 0]);
a.import(&b.export_from(&a.vv_cloned())).unwrap();
a.import(&b.export_from(&a.oplog_vv())).unwrap();
}
})
});
@ -91,12 +91,12 @@ mod run {
for i in 1..actors.len() {
let (a, b) = arref::array_mut_ref!(&mut actors, [0, i]);
a.import(&b.export_from(&a.vv_cloned())).unwrap();
a.import(&b.export_from(&a.oplog_vv())).unwrap();
}
for i in 1..actors.len() {
let (a, b) = arref::array_mut_ref!(&mut actors, [0, i]);
b.import(&a.export_from(&b.vv_cloned())).unwrap();
b.import(&a.export_from(&b.oplog_vv())).unwrap();
}
})
});

View file

@ -214,7 +214,7 @@ mod run {
}
loro_b
.import(&loro.export_from(&loro_b.vv_cloned()))
.import(&loro.export_from(&loro_b.oplog_vv()))
.unwrap();
}
})
@ -249,9 +249,9 @@ mod run {
text2.insert(&mut txn, pos, ins).unwrap();
}
loro_b
.import(&loro.export_from(&loro_b.vv_cloned()))
.import(&loro.export_from(&loro_b.oplog_vv()))
.unwrap();
loro.import(&loro_b.export_from(&loro.vv_cloned())).unwrap();
loro.import(&loro_b.export_from(&loro.oplog_vv())).unwrap();
}
})
});

View file

@ -10,12 +10,12 @@ fn main() {
let actor = LoroDoc::default();
let mut output = Vec::new();
let list = actor.get_list("list");
let mut last_vv = actor.vv_cloned();
let mut last_vv = actor.oplog_vv();
for i in 0..10000 {
let mut txn = actor.txn().unwrap();
list.insert(&mut txn, i, i.to_string().into()).unwrap();
output.push(actor.export_from(&last_vv.clone()));
last_vv = actor.vv_cloned();
last_vv = actor.oplog_vv();
}
println!("{} ms", start.elapsed().as_millis());
// drop(p)

View file

@ -119,15 +119,6 @@ version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67ba02a97a2bd10f4b59b25c7973101c79642302776489e030cd13cdab09ed15"
[[package]]
name = "compact-bytes"
version = "0.1.0"
dependencies = [
"append-only-bytes",
"fxhash",
"linked-hash-map",
]
[[package]]
name = "crdt-list"
version = "0.4.0"
@ -376,12 +367,6 @@ dependencies = [
"once_cell",
]
[[package]]
name = "linked-hash-map"
version = "0.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f"
[[package]]
name = "lock_api"
version = "0.4.9"
@ -412,7 +397,6 @@ dependencies = [
"append-only-bytes",
"arbitrary",
"arref",
"compact-bytes",
"crdt-list",
"debug-log",
"enum-as-inner 0.5.1",
@ -425,7 +409,6 @@ dependencies = [
"jumprope",
"loro-common",
"loro-preload",
"lz4_flex",
"miniz_oxide",
"num",
"postcard",
@ -435,7 +418,6 @@ dependencies = [
"serde_columnar",
"serde_json",
"smallvec",
"smartstring",
"string_cache",
"tabled",
"thiserror",
@ -461,15 +443,6 @@ dependencies = [
"serde_columnar",
]
[[package]]
name = "lz4_flex"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3ea9b256699eda7b0387ffbc776dd625e28bde3918446381781245b7a50349d8"
dependencies = [
"twox-hash",
]
[[package]]
name = "miniz_oxide"
version = "0.7.1"
@ -877,17 +850,6 @@ dependencies = [
"serde",
]
[[package]]
name = "smartstring"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fb72c633efbaa2dd666986505016c32c3044395ceaf881518399d2f4127ee29"
dependencies = [
"autocfg",
"static_assertions",
"version_check",
]
[[package]]
name = "spin"
version = "0.9.4"
@ -903,12 +865,6 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
[[package]]
name = "static_assertions"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
[[package]]
name = "str_indices"
version = "0.4.1"
@ -1039,16 +995,6 @@ dependencies = [
"once_cell",
]
[[package]]
name = "twox-hash"
version = "1.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675"
dependencies = [
"cfg-if",
"static_assertions",
]
[[package]]
name = "typenum"
version = "1.16.0"

View file

@ -1,10 +1,10 @@
use std::{
ops::{Range, RangeBounds},
ops::Range,
sync::{atomic::AtomicUsize, Arc, Mutex},
};
use append_only_bytes::{AppendOnlyBytes, BytesSlice};
use fxhash::FxHashMap;
use jumprope::JumpRope;
use crate::{
container::{
@ -28,7 +28,7 @@ struct InnerSharedArena {
container_id_to_idx: Mutex<FxHashMap<ContainerID, ContainerIdx>>,
/// The parent of each container.
parents: Mutex<FxHashMap<ContainerIdx, Option<ContainerIdx>>>,
text: Mutex<AppendOnlyBytes>,
text: Mutex<JumpRope>,
text_utf16_len: AtomicUsize,
values: Mutex<Vec<LoroValue>>,
root_c_idx: Mutex<Vec<ContainerIdx>>,
@ -88,15 +88,16 @@ impl SharedArena {
/// return utf16 len
pub fn alloc_str(&self, str: &str) -> StrAllocResult {
let mut text_lock = self.inner.text.lock().unwrap();
let start = text_lock.len();
let start = text_lock.len_chars();
let utf16_len = count_utf16_chars(str.as_bytes());
text_lock.push_slice(str.as_bytes());
let pos = text_lock.len_chars();
text_lock.insert(pos, str);
self.inner
.text_utf16_len
.fetch_add(utf16_len, std::sync::atomic::Ordering::SeqCst);
StrAllocResult {
start,
end: text_lock.len(),
end: text_lock.len_chars(),
utf16_len,
}
}
@ -107,7 +108,8 @@ impl SharedArena {
self.inner
.text_utf16_len
.fetch_add(utf16_len, std::sync::atomic::Ordering::SeqCst);
text_lock.push_slice(bytes);
let pos = text_lock.len_chars();
text_lock.insert(pos, std::str::from_utf8(bytes).unwrap());
}
pub fn utf16_len(&self) -> usize {
@ -180,8 +182,19 @@ impl SharedArena {
}
}
pub fn slice_bytes(&self, range: impl RangeBounds<usize>) -> BytesSlice {
self.inner.text.lock().unwrap().slice(range)
pub fn slice_str(&self, range: Range<usize>) -> String {
let mut ans = String::with_capacity(range.len());
for span in self.inner.text.lock().unwrap().slice_substrings(range) {
ans.push_str(span);
}
ans
}
pub fn with_text_slice(&self, range: Range<usize>, mut f: impl FnMut(&str)) {
for span in self.inner.text.lock().unwrap().slice_substrings(range) {
f(span);
}
}
pub fn get_value(&self, idx: usize) -> Option<LoroValue> {
@ -241,13 +254,16 @@ impl SharedArena {
}),
}
}
crate::text::text_content::ListSlice::RawStr(str) => {
let bytes = self.alloc_str(&str);
crate::text::text_content::ListSlice::RawStr {
str,
unicode_len: _,
} => {
let slice = self.alloc_str(&str);
Op {
counter,
container,
content: crate::op::InnerContent::List(InnerListOp::Insert {
slice: SliceRange::from(bytes.start as u32..bytes.end as u32),
slice: SliceRange::from(slice.start as u32..slice.end as u32),
pos,
}),
}
@ -260,17 +276,6 @@ impl SharedArena {
pos,
}),
},
crate::text::text_content::ListSlice::RawBytes(x) => {
let bytes = self.alloc_str(std::str::from_utf8(&x).unwrap());
Op {
counter,
container,
content: crate::op::InnerContent::List(InnerListOp::Insert {
slice: SliceRange::from(bytes.start as u32..bytes.end as u32),
pos,
}),
}
}
},
ListOp::Delete(span) => Op {
counter,

View file

@ -119,6 +119,18 @@ impl DagNode for Change {
}
impl Change {
/// Returns the Lamport timestamp of this change.
pub fn lamport(&self) -> Lamport {
    self.lamport
}

/// Returns the wall-clock timestamp recorded for this change.
pub fn timestamp(&self) -> Timestamp {
    self.timestamp
}

/// Returns the [ID] of this change (peer + counter; `can_merge_right` below
/// treats `id.counter` as the counter of the change's first op).
pub fn id(&self) -> ID {
    self.id
}
pub fn can_merge_right(&self, other: &Self) -> bool {
other.id.peer == self.id.peer
&& other.id.counter == self.id.counter + self.content_len() as Counter
@ -126,3 +138,27 @@ impl Change {
&& other.deps[0].peer == self.id.peer
}
}
/// Get the current system time as a [Timestamp], measured in whole seconds
/// since the UNIX epoch (native, non-wasm builds).
///
/// NOTE(review): the wasm variant of this function uses `Date.now()`, which
/// is milliseconds — confirm callers expect the differing units.
#[cfg(not(all(feature = "wasm", target_arch = "wasm32")))]
pub(crate) fn get_sys_timestamp() -> Timestamp {
    use std::time::{SystemTime, UNIX_EPOCH};
    SystemTime::now()
        .duration_since(UNIX_EPOCH)
        // Panics only if the system clock is set before 1970.
        .unwrap()
        .as_secs()
        // Numeric cast of the u64 seconds into `Timestamp` — presumably via a
        // casting trait in scope (e.g. num's `AsPrimitive`); TODO confirm.
        .as_()
}
/// Get the current time as a [Timestamp] in wasm builds, via the browser's
/// `Date.now()`.
///
/// NOTE(review): `Date.now()` returns *milliseconds* since the epoch, while
/// the native variant above returns *seconds* — verify this asymmetry is
/// intended before relying on cross-platform timestamp comparison.
#[cfg(all(feature = "wasm", target_arch = "wasm32"))]
pub fn get_sys_timestamp() -> Timestamp {
    use wasm_bindgen::prelude::wasm_bindgen;
    #[wasm_bindgen]
    extern "C" {
        // Bind the JS function `Date.now` (the `js_namespace` attribute
        // scopes the binding under the `Date` object).
        #[wasm_bindgen(js_namespace = Date)]
        pub fn now() -> f64;
    }
    now() as Timestamp
}

View file

@ -16,6 +16,8 @@ pub enum ListOp<'a> {
#[derive(EnumAsInner, Debug, Clone)]
pub enum InnerListOp {
// Note: len may not equal to slice.len() because for text len is unicode len while the slice
// is utf8 bytes.
Insert { slice: SliceRange, pos: usize },
Delete(DeleteSpan),
}
@ -271,10 +273,11 @@ impl Mergable for InnerListOp {
Self: Sized,
{
match self {
InnerListOp::Insert { pos, slice } => match _other {
InnerListOp::Insert { pos, slice, .. } => match _other {
InnerListOp::Insert {
pos: other_pos,
slice: other_slice,
..
} => pos + slice.content_len() == *other_pos && slice.is_mergable(other_slice, &()),
_ => false,
},
@ -344,9 +347,9 @@ mod test {
},
ListOp::Delete(DeleteSpan::new(0, 3)),
];
// let vec = postcard::to_allocvec(&list_op);
// dbg!(&vec);
let list_op_buf = vec![2, 0, 3, 0, 0, 1, 0, 6];
let actual = postcard::to_allocvec(&list_op).unwrap();
let list_op_buf = vec![2, 0, 2, 0, 0, 1, 0, 6];
assert_eq!(&actual, &list_op_buf);
assert_eq!(
postcard::from_bytes::<Vec<ListOp>>(&list_op_buf).unwrap(),
list_op

View file

@ -1,6 +1,5 @@
use std::{borrow::Cow, ops::Range};
use append_only_bytes::BytesSlice;
use enum_as_inner::EnumAsInner;
use rle::{HasLength, Mergable, Sliceable};
use serde::{ser::SerializeSeq, Deserialize, Serialize};
@ -12,8 +11,10 @@ use crate::{delta::DeltaValue, LoroValue};
#[derive(PartialEq, Debug, EnumAsInner, Clone, Serialize, Deserialize)]
pub enum ListSlice<'a> {
RawData(Cow<'a, [LoroValue]>),
RawStr(Cow<'a, str>),
RawBytes(BytesSlice),
RawStr {
str: Cow<'a, str>,
unicode_len: usize,
},
Unknown(usize),
}
@ -94,8 +95,11 @@ impl<'a> ListSlice<'a> {
pub fn to_static(&self) -> ListSlice<'static> {
match self {
ListSlice::RawData(x) => ListSlice::RawData(Cow::Owned(x.to_vec())),
ListSlice::RawStr(x) => ListSlice::RawStr(Cow::Owned(x.to_string())),
ListSlice::RawBytes(x) => ListSlice::RawBytes(x.clone()),
ListSlice::RawStr { str, unicode_len } => ListSlice::RawStr {
str: Cow::Owned(str.to_string()),
unicode_len: *unicode_len,
},
ListSlice::Unknown(x) => ListSlice::Unknown(*x),
}
}
@ -104,10 +108,9 @@ impl<'a> ListSlice<'a> {
impl<'a> HasLength for ListSlice<'a> {
fn content_len(&self) -> usize {
match self {
ListSlice::RawStr(s) => s.len(),
ListSlice::RawStr { unicode_len, .. } => *unicode_len,
ListSlice::Unknown(x) => *x,
ListSlice::RawData(x) => x.len(),
ListSlice::RawBytes(x) => x.len(),
}
}
}
@ -115,13 +118,21 @@ impl<'a> HasLength for ListSlice<'a> {
impl<'a> Sliceable for ListSlice<'a> {
fn slice(&self, from: usize, to: usize) -> Self {
match self {
ListSlice::RawStr(s) => ListSlice::RawStr(Cow::Owned(s[from..to].into())),
ListSlice::RawStr {
str,
unicode_len: _,
} => {
let ans = str.chars().skip(from).take(to - from).collect::<String>();
ListSlice::RawStr {
str: Cow::Owned(ans),
unicode_len: to - from,
}
}
ListSlice::Unknown(_) => ListSlice::Unknown(to - from),
ListSlice::RawData(x) => match x {
Cow::Borrowed(x) => ListSlice::RawData(Cow::Borrowed(&x[from..to])),
Cow::Owned(x) => ListSlice::RawData(Cow::Owned(x[from..to].into())),
},
ListSlice::RawBytes(x) => ListSlice::RawBytes(x.slice(from, to)),
}
}
}
@ -205,10 +216,17 @@ mod test {
fn fix_fields_order() {
let list_slice = vec![
ListSlice::RawData(vec![LoroValue::Bool(true)].into()),
ListSlice::RawStr("".into()),
ListSlice::RawStr {
str: "".into(),
unicode_len: 0,
},
ListSlice::Unknown(0),
];
let list_slice_buf = vec![3, 0, 1, 1, 1, 1, 0, 3, 0];
let list_slice_buf = vec![3, 0, 1, 1, 1, 1, 0, 0, 2, 0];
assert_eq!(
&postcard::to_allocvec(&list_slice).unwrap(),
&list_slice_buf
);
assert_eq!(
postcard::from_bytes::<Vec<ListSlice>>(&list_slice_buf).unwrap(),
list_slice

View file

@ -241,7 +241,12 @@ impl DiffCalculatorTrait for MapDiffCalculator {
.applied_or_smaller
.peek()
.cloned()
.unwrap();
.unwrap_or_else(|| MapValue {
counter: 0,
value: None,
lamport: (0, 0),
});
updated.insert(key, value);
}

View file

@ -179,11 +179,11 @@ pub(super) fn encode_oplog_changes(oplog: &OpLog, vv: &VersionVector) -> Vec<u8>
// TODO: perf may be optimized by using borrow type instead
match slice {
ListSlice::RawData(v) => LoroValue::List(Arc::new(v.to_vec())),
ListSlice::RawStr(s) => LoroValue::String(Arc::new(s.to_string())),
ListSlice::RawStr {
str,
unicode_len: _,
} => LoroValue::String(Arc::new(str.to_string())),
ListSlice::Unknown(_) => LoroValue::Null,
ListSlice::RawBytes(s) => LoroValue::String(Arc::new(
(std::str::from_utf8(&s).unwrap()).to_string(),
)),
},
),
ListOp::Delete(span) => {
@ -294,9 +294,10 @@ pub(super) fn decode_changes_to_inner_format_oplog(
},
_ => {
let slice = match value {
LoroValue::String(s) => {
ListSlice::RawStr(std::borrow::Cow::Owned(s.to_string()))
}
LoroValue::String(s) => ListSlice::RawStr {
str: std::borrow::Cow::Owned(s.to_string()),
unicode_len: s.chars().count(),
},
LoroValue::List(v) => {
ListSlice::RawData(std::borrow::Cow::Owned((*v).clone()))
}

View file

@ -156,18 +156,18 @@ impl Actionable for Vec<LoroDoc> {
Action::Sync { from, to } => {
if from != to {
let (from, to) = arref::array_mut_ref!(self, [*from as usize, *to as usize]);
let to_vv = to.vv_cloned();
let to_vv = to.oplog_vv();
to.import(&from.export_from(&to_vv)).unwrap();
}
}
Action::SyncAll => {
for i in 1..self.len() {
let (a, b) = array_mut_ref!(self, [0, i]);
a.import(&b.export_from(&a.vv_cloned())).unwrap();
a.import(&b.export_from(&a.oplog_vv())).unwrap();
}
for i in 1..self.len() {
let (a, b) = array_mut_ref!(self, [0, i]);
b.import(&a.export_from(&b.vv_cloned())).unwrap();
b.import(&a.export_from(&b.oplog_vv())).unwrap();
}
}
}
@ -223,13 +223,13 @@ fn check_synced_refactored(sites: &mut [LoroDoc]) {
debug_log::group_end!();
} else {
debug_log::group!("Import {} to {}", j, i);
a.import(&b.export_from(&a.vv_cloned())).unwrap();
a.import(&b.export_from(&a.oplog_vv())).unwrap();
debug_log::group_end!();
}
}
{
debug_log::group!("Import {} to {}", i, j);
b.import(&a.export_from(&b.vv_cloned())).unwrap();
b.import(&a.export_from(&b.oplog_vv())).unwrap();
debug_log::group_end!();
}
check_eq_refactored(a, b);

View file

@ -344,10 +344,10 @@ impl Actionable for Vec<Actor> {
.text_containers
.get(*container_idx as usize)
{
*pos %= (text.len() as u8).max(1);
*pos %= (text.len_unicode() as u8).max(1);
if *is_del {
*value &= 0x1f;
*value = (*value).min(text.len() as u16 - (*pos) as u16);
*value = (*value).min(text.len_unicode() as u16 - (*pos) as u16);
}
} else {
*is_del = false;
@ -373,10 +373,10 @@ impl Actionable for Vec<Actor> {
});
a.loro
.import(&b.loro.export_from(&a.loro.vv_cloned()))
.import(&b.loro.export_from(&a.loro.oplog_vv()))
.unwrap();
b.loro
.import(&a.loro.export_from(&b.loro.vv_cloned()))
.import(&a.loro.export_from(&b.loro.oplog_vv()))
.unwrap();
b.map_containers.iter().for_each(|x| {
@ -433,7 +433,7 @@ impl Actionable for Vec<Actor> {
for i in 1..self.len() {
let (a, b) = array_mut_ref!(self, [0, i]);
a.loro
.import(&b.loro.export_from(&a.loro.vv_cloned()))
.import(&b.loro.export_from(&a.loro.oplog_vv()))
.unwrap();
b.map_containers.iter().for_each(|x| {
let id = x.id();
@ -461,7 +461,7 @@ impl Actionable for Vec<Actor> {
for i in 1..self.len() {
let (a, b) = array_mut_ref!(self, [0, i]);
b.loro
.import(&a.loro.export_from(&b.loro.vv_cloned()))
.import(&a.loro.export_from(&b.loro.oplog_vv()))
.unwrap();
b.map_containers = a
.map_containers
@ -673,14 +673,10 @@ fn check_synced(sites: &mut [Actor]) {
let b_doc = &mut b.loro;
if (i + j) % 2 == 0 {
debug_log::group!("Updates {} to {}", j, i);
a_doc
.import(&b_doc.export_from(&a_doc.vv_cloned()))
.unwrap();
a_doc.import(&b_doc.export_from(&a_doc.oplog_vv())).unwrap();
debug_log::group_end!();
debug_log::group!("Updates {} to {}", i, j);
b_doc
.import(&a_doc.export_from(&b_doc.vv_cloned()))
.unwrap();
b_doc.import(&a_doc.export_from(&b_doc.oplog_vv())).unwrap();
debug_log::group_end!();
} else {
debug_log::group!("Snapshot {} to {}", j, i);

View file

@ -58,7 +58,7 @@ impl TextHandler {
}
pub fn is_empty(&self) -> bool {
self.len() == 0
self.len_unicode() == 0
}
pub fn len_utf16(&self) -> usize {
@ -72,6 +72,17 @@ impl TextHandler {
})
}
/// Returns the length of the text measured in Unicode scalar values (chars),
/// e.g. "你好" has length 2.
pub fn len_unicode(&self) -> usize {
    self.state
        // The handler holds a weak reference to the doc state; panics if the
        // owning document has been dropped.
        .upgrade()
        .unwrap()
        .lock()
        .unwrap()
        .with_state(self.container_idx, |state| {
            state.as_text_state().as_ref().unwrap().len_chars()
        })
}
pub fn len_utf8(&self) -> usize {
self.state
.upgrade()
@ -88,20 +99,15 @@ impl TextHandler {
impl TextHandler {
#[inline(always)]
pub fn insert(&self, txn: &mut Transaction, pos: usize, s: &str) -> LoroResult<()> {
self.insert_utf8(txn, pos, s)
self.insert_unicode(txn, pos, s)
}
#[inline(always)]
pub fn delete(&self, txn: &mut Transaction, pos: usize, len: usize) -> LoroResult<()> {
self.delete_utf8(txn, pos, len)
self.delete_unicode(txn, pos, len)
}
#[inline(always)]
pub fn len(&self) -> usize {
self.len_utf8()
}
pub fn insert_utf8(&self, txn: &mut Transaction, pos: usize, s: &str) -> LoroResult<()> {
pub fn insert_unicode(&self, txn: &mut Transaction, pos: usize, s: &str) -> LoroResult<()> {
if s.is_empty() {
return Ok(());
}
@ -109,7 +115,10 @@ impl TextHandler {
txn.apply_local_op(
self.container_idx,
crate::op::RawOpContent::List(crate::container::list::list_op::ListOp::Insert {
slice: ListSlice::RawStr(Cow::Borrowed(s)),
slice: ListSlice::RawStr {
str: Cow::Borrowed(s),
unicode_len: s.chars().count(),
},
pos,
}),
None,
@ -117,7 +126,7 @@ impl TextHandler {
)
}
pub fn delete_utf8(&self, txn: &mut Transaction, pos: usize, len: usize) -> LoroResult<()> {
pub fn delete_unicode(&self, txn: &mut Transaction, pos: usize, len: usize) -> LoroResult<()> {
if len == 0 {
return Ok(());
}
@ -147,13 +156,16 @@ impl TextHandler {
.with_state(self.container_idx, |state| {
let text_state = &state.as_text_state();
let text = text_state.as_ref().unwrap();
text.utf16_to_utf8(pos)
text.utf16_to_unicode(pos)
});
txn.apply_local_op(
self.container_idx,
crate::op::RawOpContent::List(crate::container::list::list_op::ListOp::Insert {
slice: ListSlice::RawStr(Cow::Borrowed(s)),
slice: ListSlice::RawStr {
str: Cow::Borrowed(s),
unicode_len: s.chars().count(),
},
pos: start,
}),
None,
@ -177,7 +189,7 @@ impl TextHandler {
.with_state(self.container_idx, |state| {
let text_state = &state.as_text_state();
let text = text_state.as_ref().unwrap();
(text.utf16_to_utf8(pos), text.utf16_to_utf8(pos + del))
(text.utf16_to_unicode(pos), text.utf16_to_unicode(pos + del))
});
txn.apply_local_op(
self.container_idx,
@ -193,11 +205,6 @@ impl TextHandler {
#[cfg(feature = "wasm")]
impl TextHandler {
#[inline(always)]
pub fn len(&self) -> usize {
self.len_utf16()
}
#[inline(always)]
pub fn delete(&self, txn: &mut Transaction, pos: usize, del: usize) -> LoroResult<()> {
self.delete_utf16(txn, pos, del)
@ -222,13 +229,16 @@ impl TextHandler {
.with_state(self.container_idx, |state| {
let text_state = &state.as_text_state();
let text = text_state.as_ref().unwrap();
text.utf16_to_utf8(pos)
text.utf16_to_unicode(pos)
});
txn.apply_local_op(
self.container_idx,
crate::op::RawOpContent::List(crate::container::list::list_op::ListOp::Insert {
slice: ListSlice::RawStr(Cow::Borrowed(s)),
slice: ListSlice::RawStr {
str: Cow::Borrowed(s),
unicode_len: s.chars().count(),
},
pos: start,
}),
Some(EventHint::Utf16 { pos, len: 0 }),
@ -250,7 +260,7 @@ impl TextHandler {
.with_state(self.container_idx, |state| {
let text_state = &state.as_text_state();
let text = text_state.as_ref().unwrap();
(text.utf16_to_utf8(pos), text.utf16_to_utf8(pos + del))
(text.utf16_to_unicode(pos), text.utf16_to_unicode(pos + del))
});
txn.apply_local_op(
self.container_idx,

View file

@ -71,6 +71,10 @@ impl LoroDoc {
self.oplog.lock().unwrap().is_empty() && self.state.lock().unwrap().is_empty()
}
/// Whether the [DocState] is detached from the latest version of the
/// [OpLog] (this flag is set by [LoroDoc::checkout]).
pub fn is_detached(&self) -> bool {
    self.detached
}
#[allow(unused)]
pub(super) fn from_existing(oplog: OpLog, state: DocState) -> Self {
let obs = Observer::new(oplog.arena.clone());
@ -116,11 +120,20 @@ impl LoroDoc {
});
}
/// Create a new transaction.
/// All ops created inside one transaction will be packed into a single
/// [Change].
///
/// There can only be one active transaction at a time for a [LoroDoc].
#[inline(always)]
pub fn txn(&self) -> Result<Transaction, LoroError> {
    // Delegates to the origin-carrying variant with an empty origin.
    self.txn_with_origin("")
}
/// Create a new transaction with specified origin.
///
/// The origin will be propagated to the events.
/// There can only be one active transaction at a time for a [LoroDoc].
pub fn txn_with_origin(&self, origin: &str) -> Result<Transaction, LoroError> {
let mut txn =
Transaction::new_with_origin(self.state.clone(), self.oplog.clone(), origin.into());
@ -238,10 +251,17 @@ impl LoroDoc {
ans
}
pub fn vv_cloned(&self) -> VersionVector {
/// Get a clone of the version vector of the current [OpLog].
pub fn oplog_vv(&self) -> VersionVector {
    let oplog = self.oplog.lock().unwrap();
    oplog.vv().clone()
}
/// Get the version vector of the current [DocState], derived by converting
/// the state's frontiers into a version vector via the oplog's DAG.
pub fn state_vv(&self) -> VersionVector {
    // NOTE(review): this acquires the state lock and holds it while taking the
    // oplog lock, whereas `checkout` locks oplog first, then state — confirm
    // these two paths can never run concurrently, or the order is a deadlock risk.
    let f = &self.state.lock().unwrap().frontiers;
    self.oplog.lock().unwrap().dag.frontiers_to_vv(f)
}
/// id can be a str, ContainerID, or ContainerIdRaw.
/// if it's str it will use Root container, which will not be None
pub fn get_text<I: IntoContainerId>(&self, id: I) -> TextHandler {
@ -263,6 +283,7 @@ impl LoroDoc {
MapHandler::new(idx, Arc::downgrade(&self.state))
}
/// This is for debugging purpose. It will travel the whole oplog
pub fn diagnose_size(&self) {
self.oplog().lock().unwrap().diagnose_size();
}
@ -314,9 +335,45 @@ impl LoroDoc {
Ok(())
}
pub fn to_json(&self) -> LoroValue {
/// Get the deep value of the document.
pub fn get_deep_value(&self) -> LoroValue {
    let state = self.state.lock().unwrap();
    state.get_deep_value()
}
/// Checkout the [DocState] to a specific version.
///
/// This detaches the current [DocState] from the latest version of the
/// [OpLog]. Any further import will not be reflected on the [DocState]
/// until the user calls [LoroDoc::attach()].
pub fn checkout(&mut self, frontiers: &Frontiers) {
    // Lock order here is oplog first, then state.
    let oplog = self.oplog.lock().unwrap();
    let mut state = self.state.lock().unwrap();
    // Mark detached so subsequent imports stop being applied to the state.
    self.detached = true;
    let mut calc = DiffCalculator::new();
    // Version vectors for where the state currently is and where we checkout to.
    let before = &oplog.dag.frontiers_to_vv(&state.frontiers);
    let after = &oplog.dag.frontiers_to_vv(frontiers);
    // Compute the diff between the two versions...
    let diff = calc.calc_diff_internal(
        &oplog,
        before,
        Some(&state.frontiers),
        after,
        Some(frontiers),
    );
    // ...and apply it, recording the target frontiers as the new state version.
    state.apply_diff(InternalDocDiff {
        origin: "checkout".into(),
        local: true,
        diff: Cow::Owned(diff),
        new_version: Cow::Owned(frontiers.clone()),
    });
}
/// Convert a [VersionVector] into [Frontiers] using the oplog's DAG.
pub fn vv_to_frontiers(&self, vv: &VersionVector) -> Frontiers {
    self.oplog.lock().unwrap().dag.vv_to_frontiers(vv)
}

/// Convert [Frontiers] into a [VersionVector] using the oplog's DAG.
pub fn frontiers_to_vv(&self, frontiers: &Frontiers) -> VersionVector {
    self.oplog.lock().unwrap().dag.frontiers_to_vv(frontiers)
}
}
impl Default for LoroDoc {
@ -327,10 +384,58 @@ impl Default for LoroDoc {
#[cfg(test)]
mod test {
use loro_common::ID;
use crate::{version::Frontiers, LoroDoc, ToJson};
#[test]
fn test_sync() {
    // Compile-time proof that LoroDoc can be shared across threads.
    fn assert_send_sync<T: Send + Sync>(_value: T) {}
    let doc = super::LoroDoc::new();
    assert_send_sync(doc)
}
#[test]
fn test_checkout() {
    // Build a doc with 10 sequential edits to a text, map, and list container.
    let mut loro = LoroDoc::new();
    loro.set_peer_id(1);
    let text = loro.get_text("text");
    let map = loro.get_map("map");
    let list = loro.get_list("list");
    let mut txn = loro.txn().unwrap();
    for i in 0..10 {
        map.insert(&mut txn, "key", i.into()).unwrap();
        text.insert(&mut txn, 0, &i.to_string()).unwrap();
        list.insert(&mut txn, 0, i.into()).unwrap();
    }
    txn.commit().unwrap();
    // A second doc created from a full snapshot of the first.
    let mut b = LoroDoc::new();
    b.import(&loro.export_snapshot()).unwrap();
    // Checkout to the empty frontier == the document's initial (empty) state.
    loro.checkout(&Frontiers::default());
    {
        let json = &loro.get_deep_value();
        assert_eq!(json.to_json(), r#"{"text":"","list":[],"map":{}}"#);
    }
    // Counter 2 = after the first loop iteration's 3 ops (map, text, list).
    b.checkout(&ID::new(1, 2).into());
    {
        let json = &b.get_deep_value();
        assert_eq!(json.to_json(), r#"{"text":"0","list":[0],"map":{"key":0}}"#);
    }
    // Counter 3 = one op further: the map has been overwritten with 1.
    loro.checkout(&ID::new(1, 3).into());
    {
        let json = &loro.get_deep_value();
        assert_eq!(json.to_json(), r#"{"text":"0","list":[0],"map":{"key":1}}"#);
    }
    // Counter 29 = the last of the 30 ops: the fully-edited state.
    b.checkout(&ID::new(1, 29).into());
    {
        let json = &b.get_deep_value();
        assert_eq!(
            json.to_json(),
            r#"{"text":"9876543210","list":[9,8,7,6,5,4,3,2,1,0],"map":{"key":9}}"#
        );
    }
}
}

View file

@ -214,11 +214,6 @@ impl OpLog {
}
}
pub fn get_timestamp(&self) -> Timestamp {
// TODO: get timestamp
0
}
pub fn next_lamport(&self) -> Lamport {
self.next_lamport
}
@ -281,6 +276,16 @@ impl OpLog {
changes
}
/// Look up the (merged) change containing the op identified by `id`.
///
/// Returns `None` when the peer or the counter is unknown to this oplog.
pub fn get_change_at(&self, id: ID) -> Option<Change> {
    let peer_changes = self.changes.get(&id.peer)?;
    let result = peer_changes.get_by_atom_index(id.counter)?;
    Some(peer_changes.vec()[result.merged_index].clone())
}
fn convert_change_to_remote(&self, change: &Change) -> Change<RemoteOp> {
let mut ops = RleVec::new();
for op in change.ops.iter() {
@ -324,13 +329,16 @@ impl OpLog {
crate::op::InnerContent::List(list) => match list {
list_op::InnerListOp::Insert { slice, pos } => match container.container_type() {
loro_common::ContainerType::Text => {
let str = self
.arena
.slice_str(slice.0.start as usize..slice.0.end as usize);
contents.push(RawOpContent::List(list_op::ListOp::Insert {
slice: crate::container::text::text_content::ListSlice::RawBytes(
self.arena
.slice_bytes(slice.0.start as usize..slice.0.end as usize),
),
slice: crate::container::text::text_content::ListSlice::RawStr {
unicode_len: str.chars().count(),
str: Cow::Owned(str),
},
pos: *pos,
}))
}));
}
loro_common::ContainerType::List => {
contents.push(RawOpContent::List(list_op::ListOp::Insert {

View file

@ -648,14 +648,17 @@ fn encode_oplog(oplog: &OpLog, state_ref: Option<PreEncodedState>) -> FinalPhase
} else {
match op.container.get_type() {
loro_common::ContainerType::Text => {
let slice = oplog
.arena
.slice_bytes(slice.0.start as usize..slice.0.end as usize);
encoded_ops.push(record_str(
&slice,
*pos,
op.container.to_index(),
));
let range = slice.0.start as usize..slice.0.end as usize;
let mut pos = *pos;
oplog.arena.with_text_slice(range, |slice| {
encoded_ops.push(record_str(
slice.as_bytes(),
pos,
op.container.to_index(),
));
pos += slice.chars().count();
})
}
loro_common::ContainerType::List => {
let values = oplog
@ -771,14 +774,14 @@ mod test {
fn test_snapshot_encode() {
use std::borrow::Cow;
dbg!(FinalPhase {
FinalPhase {
common: Cow::Owned(vec![0, 1, 2, 253, 254, 255]),
app_state: Cow::Owned(vec![255]),
state_arena: Cow::Owned(vec![255]),
oplog_extra_arena: Cow::Owned(vec![255]),
oplog: Cow::Owned(vec![255]),
}
.encode());
.encode();
}
#[test]

View file

@ -559,10 +559,12 @@ impl DocState {
let len = value.0.iter().fold(0, |acc, cur| acc + cur.0.len());
let mut s = String::with_capacity(len);
for slice in value.0.iter() {
let bytes = self
.arena
.slice_bytes(slice.0.start as usize..slice.0.end as usize);
s.push_str(std::str::from_utf8(&bytes).unwrap());
self.arena.with_text_slice(
slice.0.start as usize..slice.0.end as usize,
|slice| {
s.push_str(slice);
},
)
}
ans.push(DeltaItem::Insert { value: s, meta: () });
index += len;

View file

@ -61,11 +61,11 @@ impl ContainerState for TextState {
index += len;
}
DeltaItem::Insert { value, .. } => {
self.insert_utf8(index, value);
self.insert_unicode(index, value);
index += value.len();
}
DeltaItem::Delete { len, .. } => {
self.delete_utf8(index..index + len);
self.delete_unicode(index..index + len);
}
}
}
@ -78,13 +78,16 @@ impl ContainerState for TextState {
match op.content {
RawOpContent::List(list) => match list {
crate::container::list::list_op::ListOp::Insert { slice, pos } => match slice {
ListSlice::RawStr(s) => {
self.insert_utf8(pos, &s);
ListSlice::RawStr {
str,
unicode_len: _,
} => {
self.insert_unicode(pos, &str);
}
_ => unreachable!(),
},
crate::container::list::list_op::ListOp::Delete(del) => {
self.delete_utf8(del.pos as usize..del.pos as usize + del.len as usize);
self.delete_unicode(del.pos as usize..del.pos as usize + del.len as usize);
}
},
_ => unreachable!(),
@ -153,11 +156,11 @@ impl TextState {
pub fn from_str(s: &str) -> Self {
let mut state = Self::new();
state.insert_utf8(0, s);
state.insert_unicode(0, s);
state
}
pub fn insert_utf8(&mut self, pos: usize, s: &str) {
pub fn insert_unicode(&mut self, pos: usize, s: &str) {
if self.in_txn {
self.record_insert(pos, s.len());
}
@ -165,7 +168,7 @@ impl TextState {
self.rope.insert(pos, s);
}
pub fn delete_utf8(&mut self, range: Range<usize>) {
pub fn delete_unicode(&mut self, range: Range<usize>) {
if range.is_empty() {
return;
}
@ -225,7 +228,7 @@ impl TextState {
self.rope.slice_substrings(0..self.len())
}
pub(crate) fn utf16_to_utf8(&self, pos: usize) -> usize {
pub(crate) fn utf16_to_unicode(&self, pos: usize) -> usize {
self.rope.wchars_to_chars(pos)
}
@ -247,13 +250,17 @@ impl TextState {
}
DeltaItem::Insert { value, .. } => {
for value in value.0.iter() {
let s = arena.slice_bytes(value.0.start as usize..value.0.end as usize);
self.insert_utf8(index, std::str::from_utf8(&s).unwrap());
index += s.len();
arena.with_text_slice(
value.0.start as usize..value.0.end as usize,
|slice| {
self.insert_unicode(index, slice);
index += slice.len();
},
);
}
}
DeltaItem::Delete { len, .. } => {
self.delete_utf8(index..index + len);
self.delete_unicode(index..index + len);
}
}
}
@ -274,7 +281,7 @@ impl TextState {
match span {
DeltaItem::Retain { len, meta: _ } => {
index += len;
let next_utf16_index = self.utf8_to_utf16(index);
let next_utf16_index = self.unicode_to_utf16(index);
new_delta = new_delta.retain(next_utf16_index - utf16_index);
utf16_index = next_utf16_index;
}
@ -282,15 +289,17 @@ impl TextState {
new_delta = new_delta.insert(value.clone());
let start_utf16_len = self.len_wchars();
for value in value.0.iter() {
let s = arena.slice_bytes(value.0.start as usize..value.0.end as usize);
self.insert_utf8(index, std::str::from_utf8(&s).unwrap());
index += s.len();
let range = value.0.start as usize..value.0.end as usize;
arena.with_text_slice(range, |s| {
self.insert_unicode(index, s);
index += s.len();
});
}
utf16_index += self.len_wchars() - start_utf16_len;
}
DeltaItem::Delete { len, .. } => {
let start_utf16_len = self.len_wchars();
self.delete_utf8(index..index + len);
self.delete_unicode(index..index + len);
new_delta = new_delta.delete(start_utf16_len - self.len_wchars());
}
}
@ -299,7 +308,7 @@ impl TextState {
Some(Diff::SeqRawUtf16(new_delta))
}
fn utf8_to_utf16(&self, index: usize) -> usize {
fn unicode_to_utf16(&self, index: usize) -> usize {
self.rope.chars_to_wchars(index)
}
}
@ -317,10 +326,10 @@ mod test {
#[test]
fn abort_txn() {
let mut state = TextState::new();
state.insert_utf8(0, "haha");
state.insert_unicode(0, "haha");
state.start_txn();
state.insert_utf8(4, "1234");
state.delete_utf8(2..6);
state.insert_unicode(4, "1234");
state.delete_unicode(2..6);
assert_eq!(state.rope.to_string(), "ha34");
state.abort_txn();
assert_eq!(state.rope.to_string(), "haha");

View file

@ -12,7 +12,7 @@ use rle::{HasLength, RleVec};
use smallvec::smallvec;
use crate::{
change::{Change, Lamport},
change::{get_sys_timestamp, Change, Lamport},
container::{
idx::ContainerIdx, list::list_op::InnerListOp, text::text_content::SliceRanges,
IntoContainerId,
@ -147,7 +147,7 @@ impl Transaction {
ops,
deps,
id: ID::new(self.peer, self.start_counter),
timestamp: oplog.get_timestamp(),
timestamp: oplog.latest_timestamp.max(get_sys_timestamp()),
};
let diff = if state.is_recording() {

View file

@ -164,6 +164,12 @@ impl From<&[ID]> for Frontiers {
}
}
impl From<ID> for Frontiers {
fn from(value: ID) -> Self {
Self([value].into())
}
}
impl From<&Vec<ID>> for Frontiers {
fn from(value: &Vec<ID>) -> Self {
let ids: &[ID] = value;

View file

@ -1 +1,47 @@
use loro_common::ID;
use loro_internal::{version::Frontiers, LoroDoc};
#[test]
fn test_timestamp() {
let doc = LoroDoc::new();
let text = doc.get_text("text");
let mut txn = doc.txn().unwrap();
text.insert(&mut txn, 0, "123").unwrap();
txn.commit().unwrap();
let change = doc
.oplog()
.lock()
.unwrap()
.get_change_at(ID::new(doc.peer_id(), 0))
.unwrap();
assert!(change.timestamp() > 1690966970);
}
#[test]
fn test_checkout() {
let mut doc = LoroDoc::new();
let text = doc.get_text("text");
let mut txn = doc.txn().unwrap();
text.insert(&mut txn, 0, "你界").unwrap();
text.insert(&mut txn, 1, "好世").unwrap();
txn.commit().unwrap();
{
doc.checkout(&Frontiers::from([ID::new(doc.peer_id(), 0)].as_slice()));
assert_eq!(text.get_value().as_string().unwrap().as_str(), "");
}
{
doc.checkout(&Frontiers::from([ID::new(doc.peer_id(), 1)].as_slice()));
assert_eq!(text.get_value().as_string().unwrap().as_str(), "你界");
}
{
doc.checkout(&Frontiers::from([ID::new(doc.peer_id(), 2)].as_slice()));
assert_eq!(text.get_value().as_string().unwrap().as_str(), "你好界");
}
{
doc.checkout(&Frontiers::from([ID::new(doc.peer_id(), 3)].as_slice()));
assert_eq!(text.get_value().as_string().unwrap().as_str(), "你好世界");
}
assert_eq!(text.len_unicode(), 4);
assert_eq!(text.len_utf8(), 12);
assert_eq!(text.len_unicode(), 4);
}

View file

@ -1,9 +1,10 @@
use js_sys::{Array, Promise, Reflect, Uint8Array};
use js_sys::{Array, Object, Promise, Reflect, Uint8Array};
use loro_internal::{
configure::SecureRandomGenerator,
container::ContainerID,
event::{Diff, Index},
handler::{ListHandler, MapHandler, TextHandler},
id::{Counter, ID},
obs::SubID,
txn::Transaction as Txn,
version::Frontiers,
@ -58,7 +59,8 @@ extern "C" {
pub type JsTransaction;
#[wasm_bindgen(typescript_type = "string | undefined")]
pub type JsOrigin;
#[wasm_bindgen(typescript_type = "{ peer: bigint, counter: number }")]
pub type JsID;
}
struct MathRandom;
@ -117,6 +119,30 @@ mod observer {
unsafe impl Sync for Observer {}
}
fn ids_to_frontiers(ids: Vec<JsID>) -> JsResult<Frontiers> {
let mut frontiers = Frontiers::default();
for id in ids {
let peer: u64 = Reflect::get(&id, &"peer".into())?.try_into()?;
let counter = Reflect::get(&id, &"counter".into())?.as_f64().unwrap() as Counter;
frontiers.push(ID::new(peer, counter));
}
Ok(frontiers)
}
fn frontiers_to_ids(frontiers: &Frontiers) -> Vec<JsID> {
let mut ans = Vec::with_capacity(frontiers.len());
for id in frontiers.iter() {
let obj = Object::new();
Reflect::set(&obj, &"peer".into(), &id.peer.into()).unwrap();
Reflect::set(&obj, &"counter".into(), &id.counter.into()).unwrap();
let value: JsValue = obj.into_js_result().unwrap();
ans.push(value.into());
}
ans
}
#[wasm_bindgen]
impl Loro {
#[wasm_bindgen(constructor)]
@ -136,6 +162,15 @@ impl Loro {
))
}
pub fn attach(&mut self) {
self.0.attach();
}
pub fn checkout(&mut self, frontiers: Vec<JsID>) -> JsResult<()> {
self.0.checkout(&ids_to_frontiers(frontiers)?);
Ok(())
}
#[wasm_bindgen(js_name = "peerId", method, getter)]
pub fn peer_id(&self) -> u64 {
self.0.peer_id()
@ -181,12 +216,12 @@ impl Loro {
#[inline(always)]
pub fn version(&self) -> Vec<u8> {
self.0.vv_cloned().encode()
self.0.oplog_vv().encode()
}
#[inline]
pub fn frontiers(&self) -> Vec<u8> {
self.0.frontiers().encode()
pub fn frontiers(&self) -> Vec<JsID> {
frontiers_to_ids(&self.0.frontiers())
}
/// - -1: self's version is less than frontiers or is parallel to target
@ -194,8 +229,8 @@ impl Loro {
/// - 1: self's version is greater than frontiers
#[inline]
#[wasm_bindgen(js_name = "cmpFrontiers")]
pub fn cmp_frontiers(&self, frontiers: &[u8]) -> JsResult<i32> {
let frontiers = Frontiers::decode(frontiers)?;
pub fn cmp_frontiers(&self, frontiers: Vec<JsID>) -> JsResult<i32> {
let frontiers = ids_to_frontiers(frontiers)?;
Ok(match self.0.cmp_frontiers(&frontiers) {
Ordering::Less => -1,
Ordering::Greater => 1,
@ -247,7 +282,7 @@ impl Loro {
#[wasm_bindgen(js_name = "toJson")]
pub fn to_json(&self) -> JsResult<JsValue> {
let json = self.0.to_json();
let json = self.0.get_deep_value();
Ok(json.into())
}
@ -441,7 +476,7 @@ impl LoroText {
#[wasm_bindgen(js_name = "length", method, getter)]
pub fn length(&self) -> usize {
self.0.len()
self.0.len_utf16()
}
pub fn subscribe(&self, loro: &Loro, f: js_sys::Function) -> JsResult<u32> {

View file

@ -156,8 +156,6 @@ impl<'a, T: Rle, A: RleTreeTrait<T>> Node<'a, T, A> {
#[cfg(debug_assertions)]
if ans.is_none() {
dbg!(parent);
dbg!(self);
unreachable!();
}

View file

@ -843,7 +843,6 @@ fn slice<T: HasLength + Sliceable>(
) -> SmallVec<[T; 2]> {
let mut index = beginning;
let mut ans = smallvec::smallvec![];
dbg!(from, to);
for item in vec.iter() {
if index < to && from < index + item.atom_len() {
let start = if index < from { from - index } else { 0 };

View file

@ -116,6 +116,4 @@ fn delete_that_causes_increase_levels() {
tree.delete_range(Some(i), Some(i + 1));
tree.debug_check();
}
dbg!(tree);
}

View file

@ -228,8 +228,7 @@ impl<T: Rle, const MAX_CHILD: usize, TreeArena: Arena> RleTreeTrait<T>
}
if index > 0 {
dbg!(&node);
assert_eq!(index, 0);
assert_eq!(index, 0, "index out of range {}", index);
}
FindPosResult::new(node.children().len() - 1, last_cache, Position::End)
}

View file

@ -0,0 +1,85 @@
import { describe, expect, it } from "vitest";
import {
Loro,
setPanicHook,
} from "../src";
setPanicHook();
describe("Checkout", () => {
it("simple checkout", () => {
const doc = new Loro();
const text = doc.getText("text");
doc.transact(txn => {
text.insert(txn, 0, "hello world");
});
const v = doc.frontiers();
doc.transact(txn => {
text.insert(txn, 0, "000");
});
expect(doc.toJson()).toStrictEqual({
text: "000hello world"
});
doc.checkout(v);
expect(doc.toJson()).toStrictEqual({
text: "hello world"
});
v[0].counter -= 1;
doc.checkout(v);
expect(doc.toJson()).toStrictEqual({
text: "hello worl"
});
});
it("Chinese char", () => {
const doc = new Loro();
const text = doc.getText("text");
doc.transact(txn => {
text.insert(txn, 0, "你好世界");
});
const v = doc.frontiers();
expect(v[0].counter).toBe(3);
v[0].counter -= 1;
doc.checkout(v);
expect(doc.toJson()).toStrictEqual({
text: "你好世"
});
v[0].counter -= 1;
doc.checkout(v);
expect(doc.toJson()).toStrictEqual({
text: "你好"
});
v[0].counter -= 1;
doc.checkout(v);
expect(doc.toJson()).toStrictEqual({
text: "你"
});
})
it("two clients", () => {
const doc = new Loro();
const text = doc.getText("text");
const txn = doc.newTransaction("");
text.insert(txn, 0, "0");
txn.commit();
const v0 = doc.frontiers();
const docB = new Loro();
docB.import(doc.exportFrom());
expect(docB.cmpFrontiers(v0)).toBe(0);
doc.transact((t) => {
text.insert(t, 1, "0");
});
expect(docB.cmpFrontiers(doc.frontiers())).toBe(-1);
const textB = docB.getText("text");
docB.transact((t) => {
textB.insert(t, 0, "0");
});
expect(docB.cmpFrontiers(doc.frontiers())).toBe(-1);
docB.import(doc.exportFrom());
expect(docB.cmpFrontiers(doc.frontiers())).toBe(1);
doc.import(docB.exportFrom());
expect(docB.cmpFrontiers(doc.frontiers())).toBe(0);
});
});

View file

@ -1,12 +1,7 @@
import { describe, expect, it } from "vitest";
import {
Delta,
ListDiff,
Loro,
LoroEvent,
MapDiff as MapDiff,
setPanicHook,
TextDiff,
} from "../src";
setPanicHook();