Fix bugs from channel moving (#3000)

This PR fixes several bugs related to channel moving and its unintuitive behavior when re-ordering channels.

Release Notes:

- N/A
Mikayla Maki 2023-09-20 17:01:14 -07:00 committed by Max Brunsfeld
parent afe1fb3d1c
commit 029a443f92
7 changed files with 202 additions and 159 deletions


@@ -3,10 +3,7 @@ mod channel_index;
use crate::{channel_buffer::ChannelBuffer, channel_chat::ChannelChat};
use anyhow::{anyhow, Result};
use client::{Client, Subscription, User, UserId, UserStore};
use collections::{
hash_map::{self, DefaultHasher},
HashMap, HashSet,
};
use collections::{hash_map, HashMap, HashSet};
use futures::{channel::mpsc, future::Shared, Future, FutureExt, StreamExt};
use gpui::{AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Task, WeakModelHandle};
use rpc::{
@@ -14,14 +11,7 @@ use rpc::{
TypedEnvelope,
};
use serde_derive::{Deserialize, Serialize};
use std::{
borrow::Cow,
hash::{Hash, Hasher},
mem,
ops::Deref,
sync::Arc,
time::Duration,
};
use std::{borrow::Cow, hash::Hash, mem, ops::Deref, sync::Arc, time::Duration};
use util::ResultExt;
use self::channel_index::ChannelIndex;
@@ -910,12 +900,6 @@ impl ChannelPath {
pub fn channel_id(&self) -> ChannelId {
self.0[self.0.len() - 1]
}
pub fn unique_id(&self) -> u64 {
let mut hasher = DefaultHasher::new();
self.0.deref().hash(&mut hasher);
hasher.finish()
}
}
impl From<ChannelPath> for Cow<'static, ChannelPath> {
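
Editor's note: the removed unique_id() hashed a channel's full ancestor path, so the same channel got a different element id whenever its path changed; the collab panel diff further down switches to the stable list index ix instead. A minimal sketch of that instability, assuming made-up path values:

// Editor's illustration, not part of the commit: hashing the ancestor path
// yields a different id for the same channel once its parent changes, which
// breaks element identity while re-ordering.
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

fn path_hash(path: &[u64]) -> u64 {
    let mut hasher = DefaultHasher::new();
    path.hash(&mut hasher);
    hasher.finish()
}

fn main() {
    // Channel 3 moved from parent 2 to parent 5: same channel, new id.
    assert_ne!(path_hash(&[1, 2, 3]), path_hash(&[1, 5, 3]));
}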


@@ -828,68 +828,53 @@ impl Database {
) -> Result<ChannelGraph> {
self.check_user_is_channel_admin(to, user, &*tx).await?;
let to_ancestors = self.get_channel_ancestors(to, &*tx).await?;
let paths = channel_path::Entity::find()
.filter(channel_path::Column::IdPath.like(&format!("%/{}/%", channel)))
.all(tx)
.await?;
let mut new_path_suffixes = HashSet::default();
for path in paths {
if let Some(start_offset) = path.id_path.find(&format!("/{}/", channel)) {
new_path_suffixes.insert((
path.channel_id,
path.id_path[(start_offset + 1)..].to_string(),
));
}
}
let paths_to_new_parent = channel_path::Entity::find()
.filter(channel_path::Column::ChannelId.eq(to))
.all(tx)
.await?;
let mut new_paths = Vec::new();
for path in paths_to_new_parent {
if path.id_path.contains(&format!("/{}/", channel)) {
Err(anyhow!("cycle"))?;
}
new_paths.extend(new_path_suffixes.iter().map(|(channel_id, path_suffix)| {
channel_path::ActiveModel {
channel_id: ActiveValue::Set(*channel_id),
id_path: ActiveValue::Set(format!("{}{}", &path.id_path, path_suffix)),
}
}));
}
channel_path::Entity::insert_many(new_paths)
.exec(&*tx)
.await?;
// remove any root edges for the channel we just linked
{
channel_path::Entity::delete_many()
.filter(channel_path::Column::IdPath.like(&format!("/{}/%", channel)))
.exec(&*tx)
.await?;
}
let mut channel_descendants = self.get_channel_descendants([channel], &*tx).await?;
for ancestor in to_ancestors {
if channel_descendants.contains_key(&ancestor) {
return Err(anyhow!("Cannot create a channel cycle").into());
}
}
// Now insert all of the new paths
let sql = r#"
INSERT INTO channel_paths
(id_path, channel_id)
SELECT
id_path || $1 || '/', $2
FROM
channel_paths
WHERE
channel_id = $3
ON CONFLICT (id_path) DO NOTHING;
"#;
let channel_paths_stmt = Statement::from_sql_and_values(
self.pool.get_database_backend(),
sql,
[
channel.to_proto().into(),
channel.to_proto().into(),
to.to_proto().into(),
],
);
tx.execute(channel_paths_stmt).await?;
for (descdenant_id, descendant_parent_ids) in
channel_descendants.iter().filter(|(id, _)| id != &&channel)
{
for descendant_parent_id in descendant_parent_ids.iter() {
let channel_paths_stmt = Statement::from_sql_and_values(
self.pool.get_database_backend(),
sql,
[
descdenant_id.to_proto().into(),
descdenant_id.to_proto().into(),
descendant_parent_id.to_proto().into(),
],
);
tx.execute(channel_paths_stmt).await?;
}
}
// If we're linking a channel, remove any root edges for the channel
{
let sql = r#"
DELETE FROM channel_paths
WHERE
id_path = '/' || $1 || '/'
"#;
let channel_paths_stmt = Statement::from_sql_and_values(
self.pool.get_database_backend(),
sql,
[channel.to_proto().into()],
);
tx.execute(channel_paths_stmt).await?;
}
if let Some(channel) = channel_descendants.get_mut(&channel) {
// Remove the other parents
channel.clear();
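
Editor's note: the new link_channel body above splices id_path strings rather than relying on the old raw INSERT ... SELECT — every stored path that passes through the linked channel contributes a suffix starting at that channel, and each suffix is appended to every path ending at the new parent. A standalone sketch of that string manipulation, with illustrative ids:

// Editor's sketch of the path splicing performed by the new link_channel code.
fn main() {
    let channel: u64 = 5;
    let descendant_path = "/1/2/5/7/"; // an existing path through channel 5
    let new_parent_path = "/1/9/";     // a path ending at the new parent

    // Take the suffix beginning at the linked channel...
    let needle = format!("/{}/", channel);
    let start_offset = descendant_path.find(&needle).unwrap();
    let suffix = &descendant_path[start_offset + 1..]; // "5/7/"

    // ...and graft it onto the new parent's path.
    let new_path = format!("{}{}", new_parent_path, suffix);
    assert_eq!(new_path, "/1/9/5/7/");
}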
@@ -938,33 +923,41 @@ impl Database {
let sql = r#"
DELETE FROM channel_paths
WHERE
id_path LIKE '%' || $1 || '/' || $2 || '%'
id_path LIKE '%/' || $1 || '/' || $2 || '/%'
RETURNING id_path, channel_id
"#;
let channel_paths_stmt = Statement::from_sql_and_values(
let paths = channel_path::Entity::find()
.from_raw_sql(Statement::from_sql_and_values(
self.pool.get_database_backend(),
sql,
[from.to_proto().into(), channel.to_proto().into()],
);
tx.execute(channel_paths_stmt).await?;
))
.all(&*tx)
.await?;
let is_stranded = channel_path::Entity::find()
.filter(channel_path::Column::ChannelId.eq(channel))
.count(&*tx)
.await?
== 0;
// Make sure that there is always at least one path to the channel
let sql = r#"
INSERT INTO channel_paths
(id_path, channel_id)
SELECT
'/' || $1 || '/', $2
WHERE NOT EXISTS
(SELECT *
FROM channel_paths
WHERE channel_id = $2)
"#;
let channel_paths_stmt = Statement::from_sql_and_values(
self.pool.get_database_backend(),
sql,
[channel.to_proto().into(), channel.to_proto().into()],
);
tx.execute(channel_paths_stmt).await?;
if is_stranded {
let root_paths: Vec<_> = paths
.iter()
.map(|path| {
let start_offset = path.id_path.find(&format!("/{}/", channel)).unwrap();
channel_path::ActiveModel {
channel_id: ActiveValue::Set(path.channel_id),
id_path: ActiveValue::Set(path.id_path[start_offset..].to_string()),
}
})
.collect();
channel_path::Entity::insert_many(root_paths)
.exec(&*tx)
.await?;
}
Ok(())
}
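
Editor's note: the unlink path above now collects the rows it deletes and, if the channel is left with no remaining paths ("stranded"), re-inserts them trimmed so they start at the unlinked channel — that is what keeps its sub-channels reachable in the new test below. A sketch of the trimming, with illustrative ids:

// Editor's sketch of the re-rooting step used when a channel is stranded.
fn main() {
    let channel: u64 = 5;
    let deleted_path = "/1/2/5/7/"; // a path removed by the unlink

    let start_offset = deleted_path.find(&format!("/{}/", channel)).unwrap();
    let root_path = &deleted_path[start_offset..];

    // Channel 5 becomes a root while its child 7 stays attached.
    assert_eq!(root_path, "/5/7/");
}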
@@ -978,6 +971,13 @@ impl Database {
from: ChannelId,
to: ChannelId,
) -> Result<ChannelGraph> {
if from == to {
return Ok(ChannelGraph {
channels: vec![],
edges: vec![],
});
}
self.transaction(|tx| async move {
self.check_user_is_channel_admin(channel, user, &*tx)
.await?;
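
Editor's note: with the early return above, asking to move a channel to the parent it is already under yields an empty ChannelGraph, and the collab server hunk further down simply acks such requests instead of notifying members. A compressed sketch of that contract, using a stand-in ChannelGraph rather than the real type:

// Editor's sketch; ChannelGraph here is a simplified stand-in.
#[derive(Default)]
struct ChannelGraph {
    channels: Vec<u64>,
    edges: Vec<(u64, u64)>,
}

impl ChannelGraph {
    fn is_empty(&self) -> bool {
        self.channels.is_empty() && self.edges.is_empty()
    }
}

// Moving between identical parents is a no-op and reports no changes.
fn move_channel(_channel: u64, from_parent: u64, to_parent: u64) -> ChannelGraph {
    if from_parent == to_parent {
        return ChannelGraph::default();
    }
    unimplemented!("re-parent and return the affected sub-graph")
}

fn main() {
    assert!(move_channel(7, 1, 1).is_empty());
}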


@@ -791,6 +791,64 @@ async fn test_db_channel_moving(db: &Arc<Database>) {
assert!(result.channels.is_empty())
}
test_both_dbs!(
test_db_channel_moving_bugs,
test_db_channel_moving_bugs_postgres,
test_db_channel_moving_bugs_sqlite
);
async fn test_db_channel_moving_bugs(db: &Arc<Database>) {
let user_id = db
.create_user(
"user1@example.com",
false,
NewUserParams {
github_login: "user1".into(),
github_user_id: 5,
invite_count: 0,
},
)
.await
.unwrap()
.user_id;
let zed_id = db.create_root_channel("zed", "1", user_id).await.unwrap();
let projects_id = db
.create_channel("projects", Some(zed_id), "2", user_id)
.await
.unwrap();
let livestreaming_id = db
.create_channel("livestreaming", Some(projects_id), "3", user_id)
.await
.unwrap();
// DAG is: zed - projects - livestreaming
// Move to same parent should be a no-op
assert!(db
.move_channel(user_id, projects_id, zed_id, zed_id)
.await
.unwrap()
.is_empty());
// Stranding a channel should retain its sub-channels
db.unlink_channel(user_id, projects_id, zed_id)
.await
.unwrap();
let result = db.get_channels_for_user(user_id).await.unwrap();
assert_dag(
result.channels,
&[
(zed_id, None),
(projects_id, None),
(livestreaming_id, Some(projects_id)),
],
);
}
#[track_caller]
fn assert_dag(actual: ChannelGraph, expected: &[(ChannelId, Option<ChannelId>)]) {
let mut actual_map: HashMap<ChannelId, HashSet<ChannelId>> = HashMap::default();


@@ -2474,6 +2474,11 @@ async fn move_channel(
.move_channel(session.user_id, channel_id, from_parent, to)
.await?;
if channels_to_send.is_empty() {
response.send(Ack {})?;
return Ok(());
}
let members_from = db.get_channel_members(from_parent).await?;
let members_to = db.get_channel_members(to).await?;


@@ -145,8 +145,6 @@ async fn test_core_channels(
],
);
println!("STARTING CREATE CHANNEL C");
let channel_c_id = client_a
.channel_store()
.update(cx_a, |channel_store, cx| {
@@ -1028,10 +1026,6 @@ async fn test_channel_moving(
// - ep
assert_channels_list_shape(client_c.channel_store(), cx_c, &[(channel_ep_id, 0)]);
println!("*******************************************");
println!("********** STARTING LINK CHANNEL **********");
println!("*******************************************");
dbg!(client_b.user_id());
client_b
.channel_store()
.update(cx_b, |channel_store, cx| {
@@ -1199,5 +1193,5 @@ fn assert_channels_list_shape(
.map(|(depth, channel)| (channel.id, depth))
.collect::<Vec<_>>()
});
pretty_assertions::assert_eq!(dbg!(actual), expected_channels);
pretty_assertions::assert_eq!(actual, expected_channels);
}


@@ -1787,7 +1787,7 @@ impl CollabPanel {
is_dragged_over = true;
}
MouseEventHandler::new::<Channel, _>(path.unique_id() as usize, cx, |state, cx| {
MouseEventHandler::new::<Channel, _>(ix, cx, |state, cx| {
let row_hovered = state.hovered();
let mut select_state = |interactive: &Interactive<ContainerStyle>| {
@@ -1822,10 +1822,7 @@ impl CollabPanel {
.flex(1., true),
)
.with_child(
MouseEventHandler::new::<ChannelCall, _>(
channel.id as usize,
cx,
move |_, cx| {
MouseEventHandler::new::<ChannelCall, _>(ix, cx, move |_, cx| {
let participants =
self.channel_store.read(cx).channel_participants(channel_id);
if !participants.is_empty() {
@@ -1861,8 +1858,7 @@ impl CollabPanel {
} else {
Empty::new().into_any()
}
},
)
})
.on_click(MouseButton::Left, move |_, this, cx| {
this.join_channel_call(channel_id, cx);
}),
@@ -1875,7 +1871,7 @@ impl CollabPanel {
location: path.clone(),
}),
)
.with_id(path.unique_id() as usize)
.with_id(ix)
.with_style(theme.disclosure.clone())
.element()
.constrained()
@@ -1955,11 +1951,11 @@ impl CollabPanel {
})
.as_draggable(
(channel.clone(), path.parent_id()),
move |e, (channel, _), cx: &mut ViewContext<Workspace>| {
move |modifiers, (channel, _), cx: &mut ViewContext<Workspace>| {
let theme = &theme::current(cx).collab_panel;
Flex::<Workspace>::row()
.with_children(e.alt.then(|| {
.with_children(modifiers.alt.then(|| {
Svg::new("icons/plus.svg")
.with_color(theme.channel_hash.color)
.constrained()


@@ -364,9 +364,15 @@ impl<V: 'static> Draggable<V> for MouseEventHandler<V> {
DragAndDrop::<D>::drag_started(e, cx);
})
.on_drag(MouseButton::Left, move |e, _, cx| {
if e.end {
cx.update_global::<DragAndDrop<D>, _, _>(|drag_and_drop, cx| {
drag_and_drop.finish_dragging(cx)
})
} else {
let payload = payload.clone();
let render = render.clone();
DragAndDrop::<D>::dragging(e, payload, cx, render)
}
})
}
}
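
Editor's note: the last hunk makes the drag handler terminate cleanly — when the mouse-drag event reports end, the global DragAndDrop state is told to finish_dragging instead of re-rendering the drag preview. A reduced sketch of that branch shape, with simplified types standing in for gpui's event and global state:

// Editor's sketch; DragEvent and DragState are simplified stand-ins for
// gpui's MouseDrag event and the DragAndDrop global.
struct DragEvent { end: bool }

#[derive(Default)]
struct DragState { dragging: bool }

impl DragState {
    fn continue_dragging(&mut self) { self.dragging = true; }
    fn finish_dragging(&mut self) { self.dragging = false; }
}

fn on_drag(state: &mut DragState, e: &DragEvent) {
    if e.end {
        // The drag ended: clear the global drag state.
        state.finish_dragging();
    } else {
        // Still dragging: keep rendering the dragged payload.
        state.continue_dragging();
    }
}

fn main() {
    let mut state = DragState::default();
    on_drag(&mut state, &DragEvent { end: false });
    assert!(state.dragging);
    on_drag(&mut state, &DragEvent { end: true });
    assert!(!state.dragging);
}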