mirror of https://github.com/loro-dev/loro.git
synced 2025-02-05 12:14:43 +00:00

fix: map.keys() may return keys from deleted entries (#618)

* fix: map.keys() may return keys from deleted entries
* chore: changeset
* chore: fix latest clippy warning

parent d5ec926bb4
commit 07500dab34

21 changed files with 92 additions and 32 deletions
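
Context for the changes below: after this fix, an entry removed with delete() no longer appears in LoroMap::keys(), values(), or for_each(). A condensed sketch of that expectation, using only the loro crate calls that appear in the regression test added later in this diff (it mirrors that test rather than defining any new API):

    use loro::LoroDoc;

    fn main() {
        let doc = LoroDoc::new();
        let map = doc.get_map("map");
        map.insert("a", "b").unwrap();
        map.insert("c", "d").unwrap();
        map.insert("e", "f").unwrap();
        map.delete("c").unwrap();

        // With the fix, the tombstone left by `delete` is skipped.
        let keys: Vec<String> = map.keys().map(|k| k.to_string()).collect();
        assert_eq!(keys.len(), 2); // "a" and "e"; the deleted "c" is gone
    }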
.changeset/moody-icons-mix.md (new file, 5 lines)
@@ -0,0 +1,5 @@
+---
+"loro-crdt": patch
+---
+
+fix: map.keys() may return keys from deleted entries #618
@@ -149,7 +149,7 @@ impl<T: Rng> RandomCharIter<T> {
     pub fn new(rng: T) -> Self {
         Self {
             rng,
-            simple_text: std::env::var("SIMPLE_TEXT").map_or(false, |v| !v.is_empty()),
+            simple_text: std::env::var("SIMPLE_TEXT").is_ok_and(|v| !v.is_empty()),
         }
     }
 
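Aside on the "fix latest clippy warning" part of this commit: newer clippy releases flag `.map_or(false, f)` on Option/Result and suggest the dedicated predicates `is_some_and` / `is_ok_and` (both stable since Rust 1.70). A standalone illustration of the equivalence, not taken from the repository:

    fn main() {
        // Result::map_or(false, f) and Result::is_ok_and(f) agree on every input.
        let r: Result<&str, ()> = Ok("x");
        assert_eq!(r.map_or(false, |s| !s.is_empty()), r.is_ok_and(|s| !s.is_empty()));

        // Likewise Option::map_or(false, f) and Option::is_some_and(f).
        let o: Option<i32> = Some(3);
        assert_eq!(o.map_or(false, |n| n > 2), o.is_some_and(|n| n > 2));

        // Same shape as the call site above:
        let simple_text = std::env::var("SIMPLE_TEXT").is_ok_and(|v| !v.is_empty());
        println!("simple_text = {simple_text}");
    }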
@@ -1,3 +1,4 @@
+#![allow(unexpected_cfgs)]
 use loro_delta::{array_vec::ArrayVec, DeltaRope, DeltaRopeBuilder};
 use tracing_subscriber::fmt::format::FmtSpan;
 
@@ -1,3 +1,4 @@
+#![allow(unexpected_cfgs)]
 use std::collections::HashMap;
 
 use loro_delta::{
@@ -1,3 +1,4 @@
+#![allow(unexpected_cfgs)]
 use examples::json::fuzz;
 use loro::loro_value;
 
@@ -1,4 +1,5 @@
 #![allow(deprecated)]
+#![allow(unexpected_cfgs)]
 use std::sync::Arc;
 
 use loro::{ToJson as _, ID};
@@ -1,3 +1,4 @@
+#![allow(unexpected_cfgs)]
 use fuzz::{
     actions::{ActionWrapper::*, GenericAction},
     crdt_fuzzer::{test_multi_sites, Action::*, FuzzTarget, FuzzValue::*},
@@ -1,3 +1,4 @@
+#![allow(unexpected_cfgs)]
 #![allow(deprecated)]
 use fuzz::{
     actions::{ActionWrapper::*, GenericAction},
@@ -1,3 +1,4 @@
+#![allow(unexpected_cfgs)]
 use fuzz::{kv_minify_simple, test_mem_kv_fuzzer, KVAction::*};
 
 #[ctor::ctor]
@@ -146,21 +147,21 @@ fn merge_import() {
 #[test]
 fn scan_empty() {
     test_mem_kv_fuzzer(&mut [
-        Add{
-            key: vec![0, 255],
-            value: vec![]
-        },
-        Add{
-            key: vec![],
-            value: vec![]
-        },
-        Scan{
-            start: 129,
-            end: 0,
-            start_include: false,
-            end_include: false
-        },
-    ])
+        Add {
+            key: vec![0, 255],
+            value: vec![],
+        },
+        Add {
+            key: vec![],
+            value: vec![],
+        },
+        Scan {
+            start: 129,
+            end: 0,
+            start_include: false,
+            end_include: false,
+        },
+    ])
 }
 #[test]
 fn minify() {
@@ -1,3 +1,4 @@
+#![allow(unexpected_cfgs)]
 #![allow(deprecated)]
 
 use arbitrary::Unstructured;
@@ -1,3 +1,4 @@
+#![allow(unexpected_cfgs)]
 #[allow(unused_imports)]
 use fuzz::{
     actions::{ActionInner, ActionWrapper::*, GenericAction},
@@ -1,3 +1,4 @@
+#![allow(unexpected_cfgs)]
 use fuzz::{
     actions::{ActionWrapper::*, GenericAction},
     crdt_fuzzer::{minify_simple, test_multi_sites, Action::*, FuzzTarget, FuzzValue::*},
@@ -1,3 +1,4 @@
+#![allow(unexpected_cfgs)]
 use bytes::Bytes;
 use loro_kv_store::{mem_store::MemKvConfig, MemKvStore};
 
@@ -32,7 +32,7 @@ fn log_size() {
     println!("\n");
     println!("Snapshot size={}", snapshot.len());
     println!("Updates size={}", updates.len());
-    println!("Json Updates size={}", json_updates.as_bytes().len());
+    println!("Json Updates size={}", json_updates.len());
     println!("\n");
     loro.diagnose_size();
 }
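Note on the one-line change above: if json_updates is a String (or &str), .len() already reports the UTF-8 byte length, so dropping .as_bytes() does not change the printed number. Quick standalone check (assumes a String, as the removed .as_bytes() call suggests):

    fn main() {
        let s = String::from("héllo"); // 'é' takes two bytes in UTF-8
        assert_eq!(s.len(), 6);
        assert_eq!(s.len(), s.as_bytes().len()); // always equal
    }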
@@ -77,7 +77,7 @@ impl BoolRleVec {
         self.len -= n;
 
         // Remove any trailing zero-length runs
-        while self.rle_vec.last().map_or(false, |&x| x == 0) {
+        while self.rle_vec.last().is_some_and(|&x| x == 0) {
            self.rle_vec.pop();
         }
     }
@@ -1615,10 +1615,7 @@ impl TextHandler {
         match &self.inner {
             MaybeDetached::Detached(t) => {
                 let mut t = t.try_lock().unwrap();
-                let ranges = match t.value.get_text_entity_ranges(pos, len, PosType::Bytes) {
-                    Err(x) => return Err(x),
-                    Ok(x) => x,
-                };
+                let ranges = t.value.get_text_entity_ranges(pos, len, PosType::Bytes)?;
                 for range in ranges.iter().rev() {
                     t.value
                         .drain_by_entity_index(range.entity_start, range.entity_len(), None);
@@ -1635,10 +1632,7 @@ impl TextHandler {
         match &self.inner {
             MaybeDetached::Detached(t) => {
                 let mut t = t.try_lock().unwrap();
-                let ranges = match t.value.get_text_entity_ranges(pos, len, PosType::Unicode) {
-                    Err(x) => return Err(x),
-                    Ok(x) => x,
-                };
+                let ranges = t.value.get_text_entity_ranges(pos, len, PosType::Unicode)?;
                 for range in ranges.iter().rev() {
                     t.value
                         .drain_by_entity_index(range.entity_start, range.entity_len(), None);
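The two TextHandler hunks above are a pure refactor: a match that returns the Err arm and unwraps the Ok arm is exactly what the ? operator does when the surrounding function returns a compatible Result (no error conversion is involved here, since both sides use the same error type). A generic sketch of the equivalence with made-up names, not Loro API:

    fn parse_long(s: &str) -> Result<i32, std::num::ParseIntError> {
        // Long form, like the removed code:
        let n = match s.trim().parse::<i32>() {
            Err(x) => return Err(x),
            Ok(x) => x,
        };
        Ok(n * 2)
    }

    fn parse_short(s: &str) -> Result<i32, std::num::ParseIntError> {
        // Short form with `?`, like the added code:
        let n = s.trim().parse::<i32>()?;
        Ok(n * 2)
    }

    fn main() {
        assert_eq!(parse_long(" 21 "), parse_short(" 21 "));
        assert!(parse_long("oops").is_err() && parse_short("oops").is_err());
    }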
@@ -3827,8 +3821,10 @@ impl MapHandler {
             }
             MaybeDetached::Attached(a) => {
                 a.with_state(|state| {
-                    for (k, _) in state.as_map_state().unwrap().iter() {
-                        keys.push(k.clone());
+                    for (k, v) in state.as_map_state().unwrap().iter() {
+                        if v.value.is_some() {
+                            keys.push(k.clone());
+                        }
                     }
                 });
             }
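This hunk is the actual bug fix. The attached map state keeps tombstone entries for deleted keys (their value field is None, which is what the new v.value.is_some() check tests), and the old loop pushed every key regardless, so keys() could report deleted entries. Below is a self-contained sketch of the same filtering pattern over a plain HashMap stand-in; MapState's real layout is richer, and TombstoneMap here is purely illustrative:

    use std::collections::HashMap;

    // Stand-in for a map state that keeps tombstones: a deleted key stays in
    // the table with its value slot cleared instead of being removed.
    struct TombstoneMap {
        entries: HashMap<String, Option<String>>,
    }

    impl TombstoneMap {
        fn keys(&self) -> impl Iterator<Item = &String> + '_ {
            // Skip entries whose value slot is None, mirroring the
            // `v.value.is_some()` check added above.
            self.entries
                .iter()
                .filter(|(_, slot)| slot.is_some())
                .map(|(k, _)| k)
        }
    }

    fn main() {
        let mut m = TombstoneMap { entries: HashMap::new() };
        m.entries.insert("a".into(), Some("b".into()));
        m.entries.insert("c".into(), None); // deleted entry kept as a tombstone
        assert_eq!(m.keys().count(), 1); // only the live key "a" is reported
    }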
@@ -903,9 +903,7 @@ impl TreeState {
     ///
     /// O(1)
     pub fn is_parent(&self, target: &TreeID, parent: &TreeParentId) -> bool {
-        self.trees
-            .get(target)
-            .map_or(false, |x| x.parent == *parent)
+        self.trees.get(target).is_some_and(|x| x.parent == *parent)
     }
 
     /// Delete the position cache of the node
@@ -66,7 +66,7 @@ impl InternalMap {
     fn contains(&self, id: &ID) -> bool {
         self.0
             .get(&id.peer)
-            .map_or(false, |&counter| counter == id.counter)
+            .is_some_and(|&counter| counter == id.counter)
     }
 
     fn insert(&mut self, id: ID) {
@@ -1,3 +1,4 @@
+#![allow(unexpected_cfgs)]
 use loro::LoroDoc;
 
 #[ctor::ctor]
@@ -1,7 +1,9 @@
 #![allow(deprecated)]
+#![allow(unexpected_cfgs)]
 use pretty_assertions::assert_eq;
 use std::{
     cmp::Ordering,
+    collections::HashSet,
     ops::ControlFlow,
     sync::{
         atomic::{AtomicBool, AtomicU64},
@@ -2960,6 +2962,7 @@ fn test_diff_apply_with_unknown_container() -> LoroResult<()> {
     Ok(())
 }
 
+#[test]
 fn test_set_merge_interval() {
     let doc = LoroDoc::new();
     doc.set_record_timestamp(true);
@@ -2982,3 +2985,48 @@ fn test_set_merge_interval() {
         assert_eq!(new_doc.len_changes(), 2);
     }
 }
+
+#[test]
+fn test_child_container_attach_behavior() {
+    let map = LoroMap::new();
+    let child = map.insert_container("child", LoroMap::new()).unwrap();
+    let doc = LoroDoc::new();
+    doc.get_map("meta").insert_container("map", map).unwrap();
+    assert_eq!(
+        doc.get_deep_value().to_json_value(),
+        json!({
+            "meta": { "map": { "child": {} } }
+        })
+    );
+    let attached = child.get_attached().unwrap();
+    attached.insert("key", "value").unwrap();
+    assert_eq!(
+        doc.get_deep_value().to_json_value(),
+        json!({
+            "meta": { "map": { "child": { "key": "value" } } }
+        })
+    );
+}
+
+#[test]
+fn test_map_keys_values_for_each() {
+    let doc = LoroDoc::new();
+    let map = doc.get_map("map");
+    map.insert("a", "b").unwrap();
+    map.insert("c", "d").unwrap();
+    map.insert("e", "f").unwrap();
+    map.delete("c").unwrap();
+    let mut keys = HashSet::new();
+    let mut values = HashSet::new();
+    map.for_each(|k, v| {
+        keys.insert(k.to_string());
+        values.insert(v.into_value().unwrap().into_string().unwrap().to_string());
+    });
+    let keys2 = map.keys().map(|k| k.to_string()).collect::<HashSet<_>>();
+    let values2 = map
+        .values()
+        .map(|v| v.into_value().unwrap().into_string().unwrap().to_string())
+        .collect::<HashSet<_>>();
+    assert_eq!(keys, keys2);
+    assert_eq!(values, values2);
+}
@@ -1,3 +1,4 @@
+#![allow(unexpected_cfgs)]
 #![allow(deprecated)]
 
 use loro::{LoroDoc, LoroError, ToJson};