mirror of https://github.com/zed-industries/zed.git (synced 2024-12-26 10:40:54 +00:00)

Merge branch 'main' into piotr/z-2588-php

commit 11173b2199

103 changed files with 4894 additions and 725 deletions
14  .github/workflows/ci.yml (vendored)
@@ -29,7 +29,7 @@ jobs:
       rustup update stable

     - name: Checkout repo
-      uses: actions/checkout@v2
+      uses: actions/checkout@v3
       with:
         clean: false
         submodules: 'recursive'
@@ -54,12 +54,12 @@ jobs:
       cargo install cargo-nextest

     - name: Install Node
-      uses: actions/setup-node@v2
+      uses: actions/setup-node@v3
       with:
         node-version: '18'

     - name: Checkout repo
-      uses: actions/checkout@v2
+      uses: actions/checkout@v3
       with:
         clean: false
         submodules: 'recursive'
@@ -104,12 +104,12 @@ jobs:
      rustup target add wasm32-wasi

     - name: Install Node
-      uses: actions/setup-node@v2
+      uses: actions/setup-node@v3
       with:
         node-version: '18'

     - name: Checkout repo
-      uses: actions/checkout@v2
+      uses: actions/checkout@v3
       with:
         clean: false
         submodules: 'recursive'
@@ -148,8 +148,8 @@ jobs:
     - name: Create app bundle
       run: script/bundle

-    - name: Upload app bundle to workflow run if main branch or specifi label
-      uses: actions/upload-artifact@v2
+    - name: Upload app bundle to workflow run if main branch or specific label
+      uses: actions/upload-artifact@v3
       if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-build-dmg') }}
       with:
         name: Zed_${{ github.event.pull_request.head.sha || github.sha }}.dmg
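Note: the `if:` expression in the last hunk is unbalanced as committed — the first `}}` closes the expression right after `'refs/heads/main'`, leaving a dangling `|| contains(...) }}` tail. The intended condition is presumably `${{ github.ref == 'refs/heads/main' || contains(github.event.pull_request.labels.*.name, 'run-build-dmg') }}`.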
4  .github/workflows/randomized_tests.yml (vendored)
@@ -29,12 +29,12 @@ jobs:
       rustup update stable

     - name: Install Node
-      uses: actions/setup-node@v2
+      uses: actions/setup-node@v3
       with:
         node-version: '18'

     - name: Checkout repo
-      uses: actions/checkout@v2
+      uses: actions/checkout@v3
       with:
         clean: false
         submodules: 'recursive'
193  Cargo.lock (generated)
@@ -118,7 +118,7 @@ dependencies = [
  "settings",
  "smol",
  "theme",
- "tiktoken-rs",
+ "tiktoken-rs 0.4.2",
  "util",
  "workspace",
 ]
@@ -161,7 +161,7 @@ dependencies = [
  "miow 0.3.7",
  "nix",
  "parking_lot 0.12.1",
- "regex-automata",
+ "regex-automata 0.1.10",
  "serde",
  "serde_yaml",
  "signal-hook",
@@ -772,9 +772,9 @@ checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"

 [[package]]
 name = "base64"
-version = "0.21.0"
+version = "0.21.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a4a4ddaa51a5bc52a6948f74c06d20aaaddb71924eab79b8c97a8c556e942d6a"
+checksum = "604178f6c5c21f02dc555784810edfb88d34ac2c73b2eae109655649ee73ce3d"

 [[package]]
 name = "base64ct"
@@ -969,13 +969,12 @@ dependencies = [

 [[package]]
 name = "bstr"
-version = "1.4.0"
+version = "1.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c3d4260bcc2e8fc9df1eac4919a720effeb63a3f0952f5bf4944adfa18897f09"
+checksum = "6798148dccfbff0fae41c7574d2fa8f1ef3492fba0face179de5d8d447d67b05"
 dependencies = [
  "memchr",
- "once_cell",
- "regex-automata",
+ "regex-automata 0.3.3",
  "serde",
 ]

@@ -1491,6 +1490,7 @@ dependencies = [
  "theme",
  "theme_selector",
  "util",
+ "vcs_menu",
  "workspace",
  "zed-actions",
 ]
@@ -1990,7 +1990,6 @@ checksum = "14d05c10f541ae6f3bc5b3d923c20001f47db7d5f0b2bc6ad16490133842db79"
 dependencies = [
  "cc",
  "libc",
- "libnghttp2-sys",
  "libz-sys",
  "openssl-sys",
  "pkg-config",
@@ -2312,9 +2311,8 @@ dependencies = [
  "theme",
  "tree-sitter",
  "tree-sitter-html",
- "tree-sitter-javascript",
  "tree-sitter-rust",
- "tree-sitter-typescript 0.20.2 (git+https://github.com/tree-sitter/tree-sitter-typescript?rev=5d20856f34315b068c41edaee2ac8a100081d259)",
+ "tree-sitter-typescript",
  "unindent",
  "util",
  "workspace",
@@ -2448,6 +2446,12 @@ version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7"

+[[package]]
+name = "fallible-streaming-iterator"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a"
+
 [[package]]
 name = "fancy-regex"
 version = "0.11.0"
@@ -3145,6 +3149,15 @@ version = "0.14.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a"

+[[package]]
+name = "hashlink"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7249a3129cbc1ffccd74857f81464a323a152173cdb134e0fd81bc803b29facf"
+dependencies = [
+ "hashbrown 0.11.2",
+]
+
 [[package]]
 name = "hashlink"
 version = "0.8.1"
@@ -3786,15 +3799,16 @@ dependencies = [
  "text",
  "theme",
  "tree-sitter",
  "tree-sitter-elixir",
  "tree-sitter-embedded-template",
  "tree-sitter-heex",
  "tree-sitter-html",
  "tree-sitter-javascript",
- "tree-sitter-json 0.19.0",
+ "tree-sitter-json 0.20.0",
  "tree-sitter-markdown",
  "tree-sitter-python",
  "tree-sitter-ruby",
  "tree-sitter-rust",
- "tree-sitter-typescript 0.20.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tree-sitter-typescript",
  "unicase",
  "unindent",
  "util",
@@ -3906,16 +3920,6 @@ version = "0.2.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "348108ab3fba42ec82ff6e9564fc4ca0247bdccdc68dd8af9764bbc79c3c8ffb"

-[[package]]
-name = "libnghttp2-sys"
-version = "0.1.7+1.45.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "57ed28aba195b38d5ff02b9170cbff627e336a20925e43b4945390401c5dc93f"
-dependencies = [
- "cc",
- "libc",
-]
-
 [[package]]
 name = "libsqlite3-sys"
 version = "0.24.2"
@@ -4127,7 +4131,7 @@ version = "0.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
 dependencies = [
- "regex-automata",
+ "regex-automata 0.1.10",
 ]

 [[package]]
@@ -4142,6 +4146,16 @@ version = "0.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "73cbba799671b762df5a175adf59ce145165747bb891505c43d09aefbbf38beb"

+[[package]]
+name = "matrixmultiply"
+version = "0.3.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "090126dc04f95dc0d1c1c91f61bdd474b3930ca064c1edc8a849da2c6cbe1e77"
+dependencies = [
+ "autocfg 1.1.0",
+ "rawpointer",
+]
+
 [[package]]
 name = "maybe-owned"
 version = "0.3.4"
@@ -5099,7 +5113,7 @@ version = "1.4.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "9bd9647b268a3d3e14ff09c23201133a62589c658db02bb7388c7246aafe0590"
 dependencies = [
- "base64 0.21.0",
+ "base64 0.21.2",
  "indexmap 1.9.3",
  "line-wrap",
  "quick-xml",
@@ -5661,6 +5675,12 @@ version = "0.5.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f2ff9a1f06a88b01621b7ae906ef0211290d1c8a168a15542486a8f61c0833b9"

+[[package]]
+name = "rawpointer"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "60a357793950651c4ed0f3f52338f53b2f809f32d83a07f72909fa13e4c6c1e3"
+
 [[package]]
 name = "rayon"
 version = "1.7.0"
@@ -5780,6 +5800,12 @@ dependencies = [
  "regex-syntax 0.6.29",
 ]

+[[package]]
+name = "regex-automata"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "39354c10dd07468c2e73926b23bb9c2caca74c5501e38a35da70406f1d923310"
+
 [[package]]
 name = "regex-syntax"
 version = "0.6.29"
@@ -5828,7 +5854,7 @@ version = "0.11.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "13293b639a097af28fc8a90f22add145a9c954e49d77da06263d58cf44d5fb91"
 dependencies = [
- "base64 0.21.0",
+ "base64 0.21.2",
  "bytes 1.4.0",
  "encoding_rs",
  "futures-core",
@@ -6030,6 +6056,21 @@ dependencies = [
  "zeroize",
 ]

+[[package]]
+name = "rusqlite"
+version = "0.27.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "85127183a999f7db96d1a976a309eebbfb6ea3b0b400ddd8340190129de6eb7a"
+dependencies = [
+ "bitflags",
+ "fallible-iterator",
+ "fallible-streaming-iterator",
+ "hashlink 0.7.0",
+ "libsqlite3-sys",
+ "memchr",
+ "smallvec",
+]
+
 [[package]]
 name = "rust-embed"
 version = "6.6.1"
@@ -6165,7 +6206,7 @@ version = "1.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d194b56d58803a43635bdc398cd17e383d6f71f9182b9a192c127ca42494a59b"
 dependencies = [
- "base64 0.21.0",
+ "base64 0.21.2",
 ]

 [[package]]
@@ -6961,7 +7002,7 @@ dependencies = [
  "futures-executor",
  "futures-intrusive",
  "futures-util",
- "hashlink",
+ "hashlink 0.8.1",
  "hex",
  "hkdf",
  "hmac 0.12.1",
@@ -7465,7 +7506,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8ba161c549e2c0686f35f5d920e63fad5cafba2c28ad2caceaf07e5d9fa6e8c4"
 dependencies = [
  "anyhow",
- "base64 0.21.0",
+ "base64 0.21.2",
  "bstr",
  "fancy-regex",
  "lazy_static",
+ "parking_lot 0.12.1",
+ "rustc-hash",
+]
+
+[[package]]
+name = "tiktoken-rs"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1a99d843674a3468b4a9200a565bbe909a0152f95e82a52feae71e6bf2d4b49d"
+dependencies = [
+ "anyhow",
+ "base64 0.21.2",
+ "bstr",
+ "fancy-regex",
+ "lazy_static",
@@ -7956,16 +8012,6 @@ dependencies = [
  "tree-sitter",
 ]

-[[package]]
-name = "tree-sitter-javascript"
-version = "0.20.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2490fab08630b2c8943c320f7b63473cbf65511c8d83aec551beb9b4375906ed"
-dependencies = [
- "cc",
- "tree-sitter",
-]
-
 [[package]]
 name = "tree-sitter-json"
 version = "0.19.0"
@@ -8062,19 +8108,18 @@ dependencies = [
 ]

 [[package]]
-name = "tree-sitter-toml"
-version = "0.5.1"
-source = "git+https://github.com/tree-sitter/tree-sitter-toml?rev=342d9be207c2dba869b9967124c679b5e6fd0ebe#342d9be207c2dba869b9967124c679b5e6fd0ebe"
+name = "tree-sitter-svelte"
+version = "0.10.2"
+source = "git+https://github.com/Himujjal/tree-sitter-svelte?rev=697bb515471871e85ff799ea57a76298a71a9cca#697bb515471871e85ff799ea57a76298a71a9cca"
 dependencies = [
  "cc",
  "tree-sitter",
 ]

 [[package]]
-name = "tree-sitter-typescript"
-version = "0.20.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "079c695c32d39ad089101c66393aeaca30e967fba3486a91f573d2f0e12d290a"
+name = "tree-sitter-toml"
+version = "0.5.1"
+source = "git+https://github.com/tree-sitter/tree-sitter-toml?rev=342d9be207c2dba869b9967124c679b5e6fd0ebe#342d9be207c2dba869b9967124c679b5e6fd0ebe"
 dependencies = [
  "cc",
  "tree-sitter",
@@ -8386,6 +8431,54 @@ version = "0.2.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"

+[[package]]
+name = "vcs_menu"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "fuzzy",
+ "gpui",
+ "picker",
+ "theme",
+ "util",
+ "workspace",
+]
+
+[[package]]
+name = "vector_store"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "async-trait",
+ "bincode",
+ "editor",
+ "futures 0.3.28",
+ "gpui",
+ "isahc",
+ "language",
+ "lazy_static",
+ "log",
+ "matrixmultiply",
+ "picker",
+ "project",
+ "rand 0.8.5",
+ "rpc",
+ "rusqlite",
+ "schemars",
+ "serde",
+ "serde_json",
+ "settings",
+ "smol",
+ "tempdir",
+ "theme",
+ "tiktoken-rs 0.5.0",
+ "tree-sitter",
+ "tree-sitter-rust",
+ "unindent",
+ "util",
+ "workspace",
+]
+
 [[package]]
 name = "version_check"
 version = "0.9.4"
@@ -9346,7 +9439,7 @@ dependencies = [

 [[package]]
 name = "zed"
-version = "0.95.0"
+version = "0.96.0"
 dependencies = [
  "activity_indicator",
  "ai",
@@ -9447,14 +9540,16 @@ dependencies = [
  "tree-sitter-ruby",
  "tree-sitter-rust",
  "tree-sitter-scheme",
+ "tree-sitter-svelte",
  "tree-sitter-toml",
- "tree-sitter-typescript 0.20.2 (git+https://github.com/tree-sitter/tree-sitter-typescript?rev=5d20856f34315b068c41edaee2ac8a100081d259)",
+ "tree-sitter-typescript",
  "tree-sitter-yaml",
  "unindent",
  "url",
  "urlencoding",
  "util",
  "uuid 1.3.2",
+ "vector_store",
  "vim",
  "welcome",
  "workspace",
27  Cargo.toml
@@ -63,7 +63,9 @@ members = [
     "crates/theme",
     "crates/theme_selector",
     "crates/util",
+    "crates/vector_store",
     "crates/vim",
+    "crates/vcs_menu",
     "crates/workspace",
     "crates/welcome",
     "crates/xtask",
@@ -81,7 +83,8 @@ env_logger = { version = "0.9" }
 futures = { version = "0.3" }
 globset = { version = "0.4" }
 indoc = "1"
-isahc = "1.7.2"
+# We explicitly disable http2 support in isahc.
+isahc = { version = "1.7.2", default-features = false, features = ["static-curl", "text-decoding"] }
 lazy_static = { version = "1.4.0" }
 log = { version = "0.4.16", features = ["kv_unstable_serde"] }
 ordered-float = { version = "2.1.1" }
@@ -104,6 +107,28 @@ tree-sitter = "0.20"
 unindent = { version = "0.1.7" }
 pretty_assertions = "1.3.0"

+tree-sitter-c = "0.20.1"
+tree-sitter-cpp = "0.20.0"
+tree-sitter-css = { git = "https://github.com/tree-sitter/tree-sitter-css", rev = "769203d0f9abe1a9a691ac2b9fe4bb4397a73c51" }
+tree-sitter-elixir = { git = "https://github.com/elixir-lang/tree-sitter-elixir", rev = "4ba9dab6e2602960d95b2b625f3386c27e08084e" }
+tree-sitter-embedded-template = "0.20.0"
+tree-sitter-go = { git = "https://github.com/tree-sitter/tree-sitter-go", rev = "aeb2f33b366fd78d5789ff104956ce23508b85db" }
+tree-sitter-heex = { git = "https://github.com/phoenixframework/tree-sitter-heex", rev = "2e1348c3cf2c9323e87c2744796cf3f3868aa82a" }
+tree-sitter-json = { git = "https://github.com/tree-sitter/tree-sitter-json", rev = "40a81c01a40ac48744e0c8ccabbaba1920441199" }
+tree-sitter-rust = "0.20.3"
+tree-sitter-markdown = { git = "https://github.com/MDeiml/tree-sitter-markdown", rev = "330ecab87a3e3a7211ac69bbadc19eabecdb1cca" }
+tree-sitter-php = { git = "https://github.com/tree-sitter/tree-sitter-php", rev = "d38adb26304d9b9d38e9a3b4aae0ec4b29bf9462" }
+tree-sitter-python = "0.20.2"
+tree-sitter-toml = { git = "https://github.com/tree-sitter/tree-sitter-toml", rev = "342d9be207c2dba869b9967124c679b5e6fd0ebe" }
+tree-sitter-typescript = { git = "https://github.com/tree-sitter/tree-sitter-typescript", rev = "5d20856f34315b068c41edaee2ac8a100081d259" }
+tree-sitter-ruby = "0.20.0"
+tree-sitter-html = "0.19.0"
+tree-sitter-scheme = { git = "https://github.com/6cdh/tree-sitter-scheme", rev = "af0fd1fa452cb2562dc7b5c8a8c55551c39273b9"}
+tree-sitter-svelte = { git = "https://github.com/Himujjal/tree-sitter-svelte", rev = "697bb515471871e85ff799ea57a76298a71a9cca"}
+tree-sitter-racket = { git = "https://github.com/zed-industries/tree-sitter-racket", rev = "eb010cf2c674c6fd9a6316a84e28ef90190fe51a"}
+tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "f545a41f57502e1b5ddf2a6668896c1b0620f930"}
+tree-sitter-lua = "0.0.14"
+
 [patch.crates-io]
 tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "49226023693107fba9a1191136a4f47f38cdca73" }
 async-task = { git = "https://github.com/zed-industries/async-task", rev = "341b57d6de98cdfd7b418567b8de2022ca993a6e" }
@@ -16,22 +16,25 @@ Welcome to Zed, a lightning-fast, collaborative code editor that makes your drea
   brew install foreman
   ```

-* Ensure the Zed.dev website is checked out in a sibling directory:
+* Ensure the Zed.dev website is checked out in a sibling directory and install its dependencies:

   ```
   cd ..
   git clone https://github.com/zed-industries/zed.dev
+  cd zed.dev && npm install
+  npm install -g vercel
   ```

-* Initialize submodules
+* Return to the Zed project directory and initialize submodules

   ```
+  cd zed
   git submodule update --init --recursive
   ```

 * Set up a local `zed` database and seed it with some initial users:

-  Create a personal GitHub token to run `script/bootstrap` once successfully: the token needs to have an access to private repositories for the script to work (`repo` OAuth scope).
+  [Create a personal GitHub token](https://github.com/settings/tokens/new) to run `script/bootstrap` once successfully: the token needs access to private repositories for the script to work (`repo` OAuth scope).
   Then delete that token.

   ```
@@ -9,6 +9,7 @@
     "context": "Editor",
     "bindings": {
       "cmd-b": "editor::GoToDefinition",
+      "alt-cmd-b": "editor::GoToDefinitionSplit",
       "cmd-<": "editor::ScrollCursorCenter",
       "cmd-g": [
         "editor::SelectNext",
@@ -13,6 +13,7 @@
     "cmd-up": "menu::SelectFirst",
     "cmd-down": "menu::SelectLast",
     "enter": "menu::Confirm",
+    "cmd-enter": "menu::SecondaryConfirm",
     "escape": "menu::Cancel",
     "ctrl-c": "menu::Cancel",
     "cmd-{": "pane::ActivatePrevItem",
@@ -39,6 +40,7 @@
     "cmd-shift-n": "workspace::NewWindow",
     "cmd-o": "workspace::Open",
     "alt-cmd-o": "projects::OpenRecent",
+    "alt-cmd-b": "branches::OpenRecent",
     "ctrl-~": "workspace::NewTerminal",
     "ctrl-`": "terminal_panel::ToggleFocus",
     "shift-escape": "workspace::ToggleZoom"
@@ -193,8 +195,8 @@
   {
     "context": "Editor && mode == auto_height",
     "bindings": {
-      "alt-enter": "editor::Newline",
-      "cmd-alt-enter": "editor::NewlineBelow"
+      "shift-enter": "editor::Newline",
+      "cmd-shift-enter": "editor::NewlineBelow"
     }
   },
   {
@@ -220,7 +222,8 @@
       "escape": "buffer_search::Dismiss",
       "tab": "buffer_search::FocusEditor",
       "enter": "search::SelectNextMatch",
-      "shift-enter": "search::SelectPrevMatch"
+      "shift-enter": "search::SelectPrevMatch",
+      "alt-enter": "search::SelectAllMatches"
     }
   },
   {
@@ -241,6 +244,7 @@
       "cmd-f": "project_search::ToggleFocus",
       "cmd-g": "search::SelectNextMatch",
       "cmd-shift-g": "search::SelectPrevMatch",
+      "alt-enter": "search::SelectAllMatches",
       "alt-cmd-c": "search::ToggleCaseSensitive",
       "alt-cmd-w": "search::ToggleWholeWord",
       "alt-cmd-r": "search::ToggleRegex"
@@ -295,7 +299,9 @@
     "shift-f8": "editor::GoToPrevDiagnostic",
     "f2": "editor::Rename",
     "f12": "editor::GoToDefinition",
+    "alt-f12": "editor::GoToDefinitionSplit",
     "cmd-f12": "editor::GoToTypeDefinition",
+    "alt-cmd-f12": "editor::GoToTypeDefinitionSplit",
     "alt-shift-f12": "editor::FindAllReferences",
     "ctrl-m": "editor::MoveToEnclosingBracket",
     "alt-cmd-[": "editor::Fold",
@@ -404,6 +410,7 @@
     "cmd-k cmd-t": "theme_selector::Toggle",
     "cmd-k cmd-s": "zed::OpenKeymap",
     "cmd-t": "project_symbols::Toggle",
+    "cmd-ctrl-t": "semantic_search::Toggle",
     "cmd-p": "file_finder::Toggle",
     "cmd-shift-p": "command_palette::Toggle",
     "cmd-shift-m": "diagnostics::Deploy",
@@ -46,8 +46,9 @@
     "alt-f7": "editor::FindAllReferences",
     "cmd-alt-f7": "editor::FindAllReferences",
     "cmd-b": "editor::GoToDefinition",
-    "cmd-alt-b": "editor::GoToDefinition",
+    "cmd-alt-b": "editor::GoToDefinitionSplit",
     "cmd-shift-b": "editor::GoToTypeDefinition",
+    "cmd-alt-shift-b": "editor::GoToTypeDefinitionSplit",
     "alt-enter": "editor::ToggleCodeActions",
     "f2": "editor::GoToDiagnostic",
     "cmd-f2": "editor::GoToPrevDiagnostic",
@@ -20,6 +20,7 @@
     "cmd-shift-a": "editor::SelectLargerSyntaxNode",
     "shift-f12": "editor::FindAllReferences",
     "alt-cmd-down": "editor::GoToDefinition",
+    "ctrl-alt-cmd-down": "editor::GoToDefinitionSplit",
     "alt-shift-cmd-down": "editor::FindAllReferences",
     "ctrl-.": "editor::GoToHunk",
     "ctrl-,": "editor::GoToPrevHunk",
@@ -2,6 +2,7 @@
   {
     "bindings": {
       "cmd-shift-o": "projects::OpenRecent",
+      "cmd-shift-b": "branches::OpenRecent",
       "cmd-alt-tab": "project_panel::ToggleFocus"
     }
   },
@@ -11,6 +12,7 @@
       "cmd-l": "go_to_line::Toggle",
       "ctrl-shift-d": "editor::DuplicateLine",
       "cmd-b": "editor::GoToDefinition",
+      "alt-cmd-b": "editor::GoToDefinition",
       "cmd-j": "editor::ScrollCursorCenter",
       "cmd-shift-l": "editor::SelectLine",
       "cmd-shift-t": "outline::Toggle",
@@ -24,6 +24,17 @@
   },
   // The default font size for text in the editor
   "buffer_font_size": 15,
+  // Set the buffer's line height.
+  // May take 3 values:
+  //  1. Use a line height that's comfortable for reading (1.618)
+  //         "line_height": "comfortable"
+  //  2. Use a standard line height, (1.3)
+  //         "line_height": "standard",
+  //  3. Use a custom line height
+  //         "line_height": {
+  //           "custom": 2
+  //         },
+  "buffer_line_height": "comfortable",
   // The factor to grow the active pane by. Defaults to 1.0
   // which gives the same size as all other panes.
   "active_pane_magnification": 1.0,
@@ -282,7 +293,6 @@
     //     "line_height": {
     //       "custom": 2
     //     },
-    //
     "line_height": "comfortable"
     // Set the terminal's font size. If this option is not included,
     // the terminal will default to matching the buffer's font size.
@@ -291,6 +301,11 @@
     // the terminal will default to matching the buffer's font family.
     // "font_family": "Zed Mono"
   },
+  // Different settings for vector_store
+  "vector_store": {
+    "enabled": false,
+    "reindexing_delay_seconds": 600
+  },
   // Different settings for specific languages.
   "languages": {
     "Plain Text": {
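The new `buffer_line_height` setting accepts three shapes: `"comfortable"`, `"standard"`, or `{ "custom": <number> }`. As a hedged illustration — my own reduced model, not Zed's actual settings type, assuming `serde` (with the derive feature) and `serde_json` as dependencies — the three forms map naturally onto an externally tagged serde enum:

```rust
use serde::Deserialize;

// Sketch: models the three documented forms of "buffer_line_height".
#[derive(Debug, Deserialize)]
#[serde(rename_all = "snake_case")]
enum LineHeight {
    Comfortable, // 1.618
    Standard,    // 1.3
    Custom(f32), // { "custom": 2 }
}

impl LineHeight {
    fn value(&self) -> f32 {
        match self {
            LineHeight::Comfortable => 1.618,
            LineHeight::Standard => 1.3,
            LineHeight::Custom(v) => *v,
        }
    }
}

fn main() -> serde_json::Result<()> {
    // The bare string and the object form both deserialize into the same enum.
    let a: LineHeight = serde_json::from_str(r#""comfortable""#)?;
    let b: LineHeight = serde_json::from_str(r#"{ "custom": 2.0 }"#)?;
    println!("{} {}", a.value(), b.value());
    Ok(())
}
```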
@@ -4,7 +4,7 @@ pub mod room;
 use std::sync::Arc;

 use anyhow::{anyhow, Result};
-use client::{proto, Client, TypedEnvelope, User, UserStore};
+use client::{proto, ClickhouseEvent, Client, TelemetrySettings, TypedEnvelope, User, UserStore};
 use collections::HashSet;
 use futures::{future::Shared, FutureExt};
 use postage::watch;
@@ -198,6 +198,7 @@ impl ActiveCall {
             let result = invite.await;
             this.update(&mut cx, |this, cx| {
                 this.pending_invites.remove(&called_user_id);
+                this.report_call_event("invite", cx);
                 cx.notify();
             });
             result
@@ -243,21 +244,26 @@ impl ActiveCall {
         };

         let join = Room::join(&call, self.client.clone(), self.user_store.clone(), cx);

         cx.spawn(|this, mut cx| async move {
             let room = join.await?;
             this.update(&mut cx, |this, cx| this.set_room(Some(room.clone()), cx))
                 .await?;
+            this.update(&mut cx, |this, cx| {
+                this.report_call_event("accept incoming", cx)
+            });
             Ok(())
         })
     }

-    pub fn decline_incoming(&mut self) -> Result<()> {
+    pub fn decline_incoming(&mut self, cx: &mut ModelContext<Self>) -> Result<()> {
         let call = self
             .incoming_call
             .0
             .borrow_mut()
             .take()
             .ok_or_else(|| anyhow!("no incoming call"))?;
+        Self::report_call_event_for_room("decline incoming", call.room_id, &self.client, cx);
         self.client.send(proto::DeclineCall {
             room_id: call.room_id,
         })?;
@@ -266,6 +272,7 @@ impl ActiveCall {

     pub fn hang_up(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
         cx.notify();
+        self.report_call_event("hang up", cx);
         if let Some((room, _)) = self.room.take() {
             room.update(cx, |room, cx| room.leave(cx))
         } else {
@@ -273,12 +280,28 @@ impl ActiveCall {
         }
     }

+    pub fn toggle_screen_sharing(&self, cx: &mut AppContext) {
+        if let Some(room) = self.room().cloned() {
+            let toggle_screen_sharing = room.update(cx, |room, cx| {
+                if room.is_screen_sharing() {
+                    self.report_call_event("disable screen share", cx);
+                    Task::ready(room.unshare_screen(cx))
+                } else {
+                    self.report_call_event("enable screen share", cx);
+                    room.share_screen(cx)
+                }
+            });
+            toggle_screen_sharing.detach_and_log_err(cx);
+        }
+    }
+
     pub fn share_project(
         &mut self,
         project: ModelHandle<Project>,
         cx: &mut ModelContext<Self>,
     ) -> Task<Result<u64>> {
         if let Some((room, _)) = self.room.as_ref() {
+            self.report_call_event("share project", cx);
             room.update(cx, |room, cx| room.share_project(project, cx))
         } else {
             Task::ready(Err(anyhow!("no active call")))
@@ -291,6 +314,7 @@ impl ActiveCall {
         cx: &mut ModelContext<Self>,
     ) -> Result<()> {
         if let Some((room, _)) = self.room.as_ref() {
+            self.report_call_event("unshare project", cx);
             room.update(cx, |room, cx| room.unshare_project(project, cx))
         } else {
             Err(anyhow!("no active call"))
@@ -349,7 +373,29 @@ impl ActiveCall {
         self.room.as_ref().map(|(room, _)| room)
     }

+    pub fn client(&self) -> Arc<Client> {
+        self.client.clone()
+    }
+
     pub fn pending_invites(&self) -> &HashSet<u64> {
         &self.pending_invites
     }
+
+    fn report_call_event(&self, operation: &'static str, cx: &AppContext) {
+        if let Some(room) = self.room() {
+            Self::report_call_event_for_room(operation, room.read(cx).id(), &self.client, cx)
+        }
+    }
+
+    pub fn report_call_event_for_room(
+        operation: &'static str,
+        room_id: u64,
+        client: &Arc<Client>,
+        cx: &AppContext,
+    ) {
+        let telemetry = client.telemetry();
+        let telemetry_settings = *settings::get::<TelemetrySettings>(cx);
+        let event = ClickhouseEvent::Call { operation, room_id };
+        telemetry.report_clickhouse_event(event, telemetry_settings);
+    }
 }
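The thread running through this file is that call mutations now report telemetry, which is why `decline_incoming` and friends grew a context parameter: reporting needs read access to the telemetry settings. A minimal standalone sketch of that settings-gated reporting pattern — all types below are reduced stand-ins, not Zed's real API:

```rust
// Sketch: an event is only emitted when the settings (read via the context
// that callers now have to pass in) allow it.
struct Settings {
    metrics: bool,
}

enum Event {
    Call { operation: &'static str, room_id: u64 },
}

struct Telemetry;

impl Telemetry {
    fn report(&self, event: Event, settings: &Settings) {
        if settings.metrics {
            match event {
                Event::Call { operation, room_id } => {
                    println!("call event: {operation} in room {room_id}");
                }
            }
        }
    }
}

fn main() {
    let telemetry = Telemetry;
    let settings = Settings { metrics: true };
    // Mirrors `report_call_event_for_room("decline incoming", room_id, ...)`.
    telemetry.report(
        Event::Call { operation: "decline incoming", room_id: 1 },
        &settings,
    );
}
```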
@@ -70,6 +70,10 @@ pub enum ClickhouseEvent {
         suggestion_accepted: bool,
         file_extension: Option<String>,
     },
+    Call {
+        operation: &'static str,
+        room_id: u64,
+    },
 }

 #[cfg(debug_assertions)]
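A small hedged sketch of what the new variant implies for consumers: any exhaustive `match` over the event enum gains an arm. The enum here is a cut-down stand-in, not the full `ClickhouseEvent`:

```rust
enum ClickhouseEvent {
    Editor { operation: &'static str },
    Call { operation: &'static str, room_id: u64 },
}

// Exhaustive matches like this one must be extended for the new variant.
fn event_kind(event: &ClickhouseEvent) -> &'static str {
    match event {
        ClickhouseEvent::Editor { .. } => "editor",
        ClickhouseEvent::Call { .. } => "call",
    }
}

fn main() {
    let event = ClickhouseEvent::Call { operation: "hang up", room_id: 42 };
    println!("{}", event_kind(&event));
}
```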
@@ -157,7 +157,7 @@ async fn test_basic_calls(
     // User C receives the call, but declines it.
     let call_c = incoming_call_c.next().await.unwrap().unwrap();
     assert_eq!(call_c.calling_user.github_login, "user_b");
-    active_call_c.update(cx_c, |call, _| call.decline_incoming().unwrap());
+    active_call_c.update(cx_c, |call, cx| call.decline_incoming(cx).unwrap());
     assert!(incoming_call_c.next().await.unwrap().is_none());

     deterministic.run_until_parked();
@@ -1080,7 +1080,7 @@ async fn test_calls_on_multiple_connections(

     // User B declines the call on one of the two connections, causing both connections
     // to stop ringing.
-    active_call_b2.update(cx_b2, |call, _| call.decline_incoming().unwrap());
+    active_call_b2.update(cx_b2, |call, cx| call.decline_incoming(cx).unwrap());
     deterministic.run_until_parked();
     assert!(incoming_call_b1.next().await.unwrap().is_none());
     assert!(incoming_call_b2.next().await.unwrap().is_none());
@@ -5945,7 +5945,7 @@ async fn test_contacts(
         [("user_b".to_string(), "online", "busy")]
     );

-    active_call_b.update(cx_b, |call, _| call.decline_incoming().unwrap());
+    active_call_b.update(cx_b, |call, cx| call.decline_incoming(cx).unwrap());
     deterministic.run_until_parked();
     assert_eq!(
         contacts(&client_a, cx_a),
@@ -7217,7 +7217,7 @@ async fn test_peers_following_each_other(

     // Clients A and B follow each other in split panes
     workspace_a.update(cx_a, |workspace, cx| {
-        workspace.split_pane(workspace.active_pane().clone(), SplitDirection::Right, cx);
+        workspace.split_and_clone(workspace.active_pane().clone(), SplitDirection::Right, cx);
     });
     workspace_a
         .update(cx_a, |workspace, cx| {
@@ -7228,7 +7228,7 @@ async fn test_peers_following_each_other(
         .await
         .unwrap();
     workspace_b.update(cx_b, |workspace, cx| {
-        workspace.split_pane(workspace.active_pane().clone(), SplitDirection::Right, cx);
+        workspace.split_and_clone(workspace.active_pane().clone(), SplitDirection::Right, cx);
     });
     workspace_b
         .update(cx_b, |workspace, cx| {
@@ -7455,7 +7455,7 @@ async fn test_auto_unfollowing(

     // When client B activates a different pane, it continues following client A in the original pane.
     workspace_b.update(cx_b, |workspace, cx| {
-        workspace.split_pane(pane_b.clone(), SplitDirection::Right, cx)
+        workspace.split_and_clone(pane_b.clone(), SplitDirection::Right, cx)
     });
     assert_eq!(
         workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
@@ -365,7 +365,7 @@ async fn apply_client_operation(
                 }

                 log::info!("{}: declining incoming call", client.username);
-                active_call.update(cx, |call, _| call.decline_incoming())?;
+                active_call.update(cx, |call, cx| call.decline_incoming(cx))?;
             }

             ClientOperation::LeaveCall => {
@@ -39,6 +39,7 @@ recent_projects = {path = "../recent_projects"}
 settings = { path = "../settings" }
 theme = { path = "../theme" }
 theme_selector = { path = "../theme_selector" }
+vcs_menu = { path = "../vcs_menu" }
 util = { path = "../util" }
 workspace = { path = "../workspace" }
 zed-actions = {path = "../zed-actions"}
@@ -1,8 +1,5 @@
 use crate::{
-    branch_list::{build_branch_list, BranchList},
-    contact_notification::ContactNotification,
-    contacts_popover,
-    face_pile::FacePile,
+    contact_notification::ContactNotification, contacts_popover, face_pile::FacePile,
     toggle_deafen, toggle_mute, toggle_screen_sharing, LeaveCall, ToggleDeafen, ToggleMute,
     ToggleScreenSharing,
 };
@@ -27,6 +24,7 @@ use recent_projects::{build_recent_projects, RecentProjects};
 use std::{ops::Range, sync::Arc};
 use theme::{AvatarStyle, Theme};
 use util::ResultExt;
+use vcs_menu::{build_branch_list, BranchList, OpenRecent as ToggleVcsMenu};
 use workspace::{FollowNextCollaborator, Workspace, WORKSPACE_DB};

 const MAX_PROJECT_NAME_LENGTH: usize = 40;
@@ -37,7 +35,6 @@ actions!(
     [
         ToggleContactsMenu,
         ToggleUserMenu,
-        ToggleVcsMenu,
         ToggleProjectMenu,
         SwitchBranch,
         ShareProject,
@@ -286,7 +283,7 @@ impl CollabTitlebarItem {
             .with_tooltip::<BranchPopoverTooltip>(
                 0,
                 "Recent branches".into(),
-                None,
+                Some(Box::new(ToggleVcsMenu)),
                 theme.tooltip.clone(),
                 cx,
             )
@@ -1,4 +1,3 @@
-mod branch_list;
 mod collab_titlebar_item;
 mod contact_finder;
 mod contact_list;
@@ -29,7 +28,7 @@ actions!(
 );

 pub fn init(app_state: &Arc<AppState>, cx: &mut AppContext) {
-    branch_list::init(cx);
+    vcs_menu::init(cx);
     collab_titlebar_item::init(cx);
     contact_list::init(cx);
     contact_finder::init(cx);
@@ -45,11 +44,25 @@ pub fn init(app_state: &Arc<AppState>, cx: &mut AppContext) {
 }

 pub fn toggle_screen_sharing(_: &ToggleScreenSharing, cx: &mut AppContext) {
-    if let Some(room) = ActiveCall::global(cx).read(cx).room().cloned() {
+    let call = ActiveCall::global(cx).read(cx);
+    if let Some(room) = call.room().cloned() {
+        let client = call.client();
         let toggle_screen_sharing = room.update(cx, |room, cx| {
             if room.is_screen_sharing() {
+                ActiveCall::report_call_event_for_room(
+                    "disable screen share",
+                    room.id(),
+                    &client,
+                    cx,
+                );
                 Task::ready(room.unshare_screen(cx))
             } else {
+                ActiveCall::report_call_event_for_room(
+                    "enable screen share",
+                    room.id(),
+                    &client,
+                    cx,
+                );
                 room.share_screen(cx)
             }
         });
@@ -67,7 +67,7 @@ impl PickerDelegate for ContactFinderDelegate {
         })
     }

-    fn confirm(&mut self, cx: &mut ViewContext<Picker<Self>>) {
+    fn confirm(&mut self, _: bool, cx: &mut ViewContext<Picker<Self>>) {
         if let Some(user) = self.potential_contacts.get(self.selected_index) {
             let user_store = self.user_store.read(cx);
             match user_store.contact_request_status(user) {
@@ -99,8 +99,8 @@ impl IncomingCallNotification {
             })
             .detach_and_log_err(cx);
         } else {
-            active_call.update(cx, |active_call, _| {
-                active_call.decline_incoming().log_err();
+            active_call.update(cx, |active_call, cx| {
+                active_call.decline_incoming(cx).log_err();
             });
         }
     }
@@ -160,7 +160,7 @@ impl PickerDelegate for CommandPaletteDelegate {

     fn dismissed(&mut self, _cx: &mut ViewContext<Picker<Self>>) {}

-    fn confirm(&mut self, cx: &mut ViewContext<Picker<Self>>) {
+    fn confirm(&mut self, _: bool, cx: &mut ViewContext<Picker<Self>>) {
         if !self.matches.is_empty() {
             let window_id = cx.window_id();
             let focused_view_id = self.focused_view_id;
@@ -369,6 +369,7 @@ mod tests {
             editor::init(cx);
             workspace::init(app_state.clone(), cx);
             init(cx);
+            Project::init_settings(cx);
             app_state
         })
     }
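Both picker delegates above gain a `bool` on `confirm`, landing alongside the new `cmd-enter`/`menu::SecondaryConfirm` keybinding earlier in this commit. A hedged sketch of how a delegate might branch on it — the `secondary` name and the split-open behavior are my assumptions; the diff itself only shows the parameter ignored as `_: bool`:

```rust
// Sketch: the flag lets one confirm action carry two behaviors.
trait PickerDelegate {
    fn confirm(&mut self, secondary: bool);
}

struct FileFinder {
    selected: String,
}

impl PickerDelegate for FileFinder {
    fn confirm(&mut self, secondary: bool) {
        if secondary {
            println!("open {} in a split", self.selected); // cmd-enter
        } else {
            println!("open {}", self.selected); // plain enter
        }
    }
}

fn main() {
    let mut finder = FileFinder { selected: "main.rs".into() };
    finder.confirm(false);
    finder.confirm(true);
}
```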
@@ -7,7 +7,6 @@ use anyhow::Context;
 use gpui::AppContext;
 pub use indoc::indoc;
 pub use lazy_static;
-use parking_lot::{Mutex, RwLock};
 pub use smol;
 pub use sqlez;
 pub use sqlez_macros;
@@ -17,11 +16,9 @@ pub use util::paths::DB_DIR;
 use sqlez::domain::Migrator;
 use sqlez::thread_safe_connection::ThreadSafeConnection;
 use sqlez_macros::sql;
-use std::fs::create_dir_all;
 use std::future::Future;
 use std::path::{Path, PathBuf};
 use std::sync::atomic::{AtomicBool, Ordering};
-use std::time::{SystemTime, UNIX_EPOCH};
 use util::channel::ReleaseChannel;
 use util::{async_iife, ResultExt};

@@ -42,10 +39,8 @@ const DB_FILE_NAME: &'static str = "db.sqlite";

 lazy_static::lazy_static! {
     pub static ref ZED_STATELESS: bool = std::env::var("ZED_STATELESS").map_or(false, |v| !v.is_empty());
-    pub static ref BACKUP_DB_PATH: RwLock<Option<PathBuf>> = RwLock::new(None);
     pub static ref ALL_FILE_DB_FAILED: AtomicBool = AtomicBool::new(false);
 }
-static DB_FILE_OPERATIONS: Mutex<()> = Mutex::new(());

 /// Open or create a database at the given directory path.
 /// This will retry a couple times if there are failures. If opening fails once, the db directory
@@ -63,66 +58,14 @@ pub async fn open_db<M: Migrator + 'static>(
     let main_db_dir = db_dir.join(Path::new(&format!("0-{}", release_channel_name)));

     let connection = async_iife!({
-        // Note: This still has a race condition where 1 set of migrations succeeds
-        // (e.g. (Workspace, Editor)) and another fails (e.g. (Workspace, Terminal))
-        // This will cause the first connection to have the database taken out
-        // from under it. This *should* be fine though. The second dabatase failure will
-        // cause errors in the log and so should be observed by developers while writing
-        // soon-to-be good migrations. If user databases are corrupted, we toss them out
-        // and try again from a blank. As long as running all migrations from start to end
-        // on a blank database is ok, this race condition will never be triggered.
-        //
-        // Basically: Don't ever push invalid migrations to stable or everyone will have
-        // a bad time.
-
-        // If no db folder, create one at 0-{channel}
-        create_dir_all(&main_db_dir).context("Could not create db directory")?;
+        smol::fs::create_dir_all(&main_db_dir)
+            .await
+            .context("Could not create db directory")
+            .log_err()?;
         let db_path = main_db_dir.join(Path::new(DB_FILE_NAME));
-
-        // Optimistically open databases in parallel
-        if !DB_FILE_OPERATIONS.is_locked() {
-            // Try building a connection
-            if let Some(connection) = open_main_db(&db_path).await {
-                return Ok(connection)
-            };
-        }
-
-        // Take a lock in the failure case so that we move the db once per process instead
-        // of potentially multiple times from different threads. This shouldn't happen in the
-        // normal path
-        let _lock = DB_FILE_OPERATIONS.lock();
-        if let Some(connection) = open_main_db(&db_path).await {
-            return Ok(connection)
-        };
-
-        let backup_timestamp = SystemTime::now()
-            .duration_since(UNIX_EPOCH)
-            .expect("System clock is set before the unix timestamp, Zed does not support this region of spacetime")
-            .as_millis();
-
-        // If failed, move 0-{channel} to {current unix timestamp}-{channel}
-        let backup_db_dir = db_dir.join(Path::new(&format!(
-            "{}-{}",
-            backup_timestamp,
-            release_channel_name,
-        )));
-
-        std::fs::rename(&main_db_dir, &backup_db_dir)
-            .context("Failed clean up corrupted database, panicking.")?;
-
-        // Set a static ref with the failed timestamp and error so we can notify the user
-        {
-            let mut guard = BACKUP_DB_PATH.write();
-            *guard = Some(backup_db_dir);
-        }
-
-        // Create a new 0-{channel}
-        create_dir_all(&main_db_dir).context("Should be able to create the database directory")?;
-        let db_path = main_db_dir.join(Path::new(DB_FILE_NAME));
-
-        // Try again
-        open_main_db(&db_path).await.context("Could not newly created db")
-    }).await.log_err();
+        open_main_db(&db_path).await
+    })
+    .await;

     if let Some(connection) = connection {
         return connection;
@@ -249,13 +192,13 @@ where

 #[cfg(test)]
 mod tests {
-    use std::{fs, thread};
+    use std::thread;

-    use sqlez::{connection::Connection, domain::Domain};
+    use sqlez::domain::Domain;
     use sqlez_macros::sql;
     use tempdir::TempDir;

-    use crate::{open_db, DB_FILE_NAME};
+    use crate::open_db;

     // Test bad migration panics
     #[gpui::test]
@@ -321,31 +264,10 @@ mod tests {
             .unwrap()
             .is_none()
     );
-
-    let mut corrupted_backup_dir = fs::read_dir(tempdir.path())
-        .unwrap()
-        .find(|entry| {
-            !entry
-                .as_ref()
-                .unwrap()
-                .file_name()
-                .to_str()
-                .unwrap()
-                .starts_with("0")
-        })
-        .unwrap()
-        .unwrap()
-        .path();
-    corrupted_backup_dir.push(DB_FILE_NAME);
-
-    let backup = Connection::open_file(&corrupted_backup_dir.to_string_lossy());
-    assert!(backup.select_row::<usize>("SELECT * FROM test").unwrap()()
-        .unwrap()
-        .is_none());
 }

 /// Test that DB exists but corrupted (causing recreate)
-#[gpui::test]
+#[gpui::test(iterations = 30)]
 async fn test_simultaneous_db_corruption() {
     enum CorruptedDB {}

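The notable mechanical change in `open_db` is that directory creation moves from `std::fs::create_dir_all` (blocking) to `smol::fs::create_dir_all` (async), so the executor thread is not blocked while the db directory is created. A minimal runnable sketch of that pattern, assuming `smol` and `anyhow` as dependencies:

```rust
use anyhow::Context;

fn main() -> anyhow::Result<()> {
    smol::block_on(async {
        let dir = std::env::temp_dir().join("zed-db-sketch/0-dev");
        // Async counterpart of std::fs::create_dir_all, awaited on the executor.
        smol::fs::create_dir_all(&dir)
            .await
            .context("Could not create db directory")?;
        println!("created {}", dir.display());
        Ok(())
    })
}
```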
@@ -57,16 +57,16 @@ ordered-float.workspace = true
 parking_lot.workspace = true
 postage.workspace = true
 pulldown-cmark = { version = "0.9.2", default-features = false }
-rand = { workspace = true, optional = true }
 schemars.workspace = true
 serde.workspace = true
 serde_derive.workspace = true
 smallvec.workspace = true
 smol.workspace = true
-tree-sitter-rust = { version = "*", optional = true }
-tree-sitter-html = { version = "*", optional = true }
-tree-sitter-javascript = { version = "*", optional = true }
-tree-sitter-typescript = { git = "https://github.com/tree-sitter/tree-sitter-typescript", rev = "5d20856f34315b068c41edaee2ac8a100081d259", optional = true }
+
+rand = { workspace = true, optional = true }
+tree-sitter-rust = { workspace = true, optional = true }
+tree-sitter-html = { workspace = true, optional = true }
+tree-sitter-typescript = { workspace = true, optional = true }

 [dev-dependencies]
 copilot = { path = "../copilot", features = ["test-support"] }
@@ -84,7 +84,6 @@ env_logger.workspace = true
 rand.workspace = true
 unindent.workspace = true
 tree-sitter.workspace = true
-tree-sitter-rust = "0.20"
-tree-sitter-html = "0.19"
-tree-sitter-typescript = { git = "https://github.com/tree-sitter/tree-sitter-typescript", rev = "5d20856f34315b068c41edaee2ac8a100081d259" }
-tree-sitter-javascript = "0.20"
+tree-sitter-rust.workspace = true
+tree-sitter-html.workspace = true
+tree-sitter-typescript.workspace = true
@@ -271,7 +271,9 @@ actions!(
     SelectLargerSyntaxNode,
     SelectSmallerSyntaxNode,
     GoToDefinition,
+    GoToDefinitionSplit,
     GoToTypeDefinition,
+    GoToTypeDefinitionSplit,
     MoveToEnclosingBracket,
     UndoSelection,
     RedoSelection,
@@ -407,7 +409,9 @@ pub fn init(cx: &mut AppContext) {
     cx.add_action(Editor::go_to_hunk);
     cx.add_action(Editor::go_to_prev_hunk);
     cx.add_action(Editor::go_to_definition);
+    cx.add_action(Editor::go_to_definition_split);
     cx.add_action(Editor::go_to_type_definition);
+    cx.add_action(Editor::go_to_type_definition_split);
     cx.add_action(Editor::fold);
     cx.add_action(Editor::fold_at);
     cx.add_action(Editor::unfold_lines);
@@ -494,6 +498,7 @@ pub enum SoftWrap {
 #[derive(Clone)]
 pub struct EditorStyle {
     pub text: TextStyle,
+    pub line_height_scalar: f32,
     pub placeholder_text: Option<TextStyle>,
     pub theme: theme::Editor,
     pub theme_id: usize,
@@ -6184,14 +6189,31 @@ impl Editor {
     }

     pub fn go_to_definition(&mut self, _: &GoToDefinition, cx: &mut ViewContext<Self>) {
-        self.go_to_definition_of_kind(GotoDefinitionKind::Symbol, cx);
+        self.go_to_definition_of_kind(GotoDefinitionKind::Symbol, false, cx);
     }

     pub fn go_to_type_definition(&mut self, _: &GoToTypeDefinition, cx: &mut ViewContext<Self>) {
-        self.go_to_definition_of_kind(GotoDefinitionKind::Type, cx);
+        self.go_to_definition_of_kind(GotoDefinitionKind::Type, false, cx);
     }

-    fn go_to_definition_of_kind(&mut self, kind: GotoDefinitionKind, cx: &mut ViewContext<Self>) {
+    pub fn go_to_definition_split(&mut self, _: &GoToDefinitionSplit, cx: &mut ViewContext<Self>) {
+        self.go_to_definition_of_kind(GotoDefinitionKind::Symbol, true, cx);
+    }
+
+    pub fn go_to_type_definition_split(
+        &mut self,
+        _: &GoToTypeDefinitionSplit,
+        cx: &mut ViewContext<Self>,
+    ) {
+        self.go_to_definition_of_kind(GotoDefinitionKind::Type, true, cx);
+    }
+
+    fn go_to_definition_of_kind(
+        &mut self,
+        kind: GotoDefinitionKind,
+        split: bool,
+        cx: &mut ViewContext<Self>,
+    ) {
         let Some(workspace) = self.workspace(cx) else { return };
         let buffer = self.buffer.read(cx);
         let head = self.selections.newest::<usize>(cx).head();
@@ -6210,7 +6232,7 @@ impl Editor {
         cx.spawn_labeled("Fetching Definition...", |editor, mut cx| async move {
             let definitions = definitions.await?;
             editor.update(&mut cx, |editor, cx| {
-                editor.navigate_to_definitions(definitions, cx);
+                editor.navigate_to_definitions(definitions, split, cx);
             })?;
             Ok::<(), anyhow::Error>(())
         })
@@ -6220,6 +6242,7 @@ impl Editor {
     pub fn navigate_to_definitions(
         &mut self,
         mut definitions: Vec<LocationLink>,
+        split: bool,
         cx: &mut ViewContext<Editor>,
     ) {
         let Some(workspace) = self.workspace(cx) else { return };
@@ -6239,7 +6262,11 @@ impl Editor {
         } else {
             cx.window_context().defer(move |cx| {
                 let target_editor: ViewHandle<Self> = workspace.update(cx, |workspace, cx| {
-                    workspace.open_project_item(definition.target.buffer.clone(), cx)
+                    if split {
+                        workspace.split_project_item(definition.target.buffer.clone(), cx)
+                    } else {
+                        workspace.open_project_item(definition.target.buffer.clone(), cx)
+                    }
                 });
                 target_editor.update(cx, |target_editor, cx| {
                     // When selecting a definition in a different buffer, disable the nav history
@@ -6275,7 +6302,9 @@ impl Editor {
                 .map(|definition| definition.target)
                 .collect();
             workspace.update(cx, |workspace, cx| {
-                Self::open_locations_in_multibuffer(workspace, locations, replica_id, title, cx)
+                Self::open_locations_in_multibuffer(
+                    workspace, locations, replica_id, title, split, cx,
+                )
             });
         });
     }
@@ -6320,7 +6349,7 @@ impl Editor {
                 })
                 .unwrap();
             Self::open_locations_in_multibuffer(
-                workspace, locations, replica_id, title, cx,
+                workspace, locations, replica_id, title, false, cx,
             );
         })?;

@@ -6335,6 +6364,7 @@ impl Editor {
         mut locations: Vec<Location>,
         replica_id: ReplicaId,
         title: String,
+        split: bool,
         cx: &mut ViewContext<Workspace>,
     ) {
         // If there are multiple definitions, open them in a multibuffer
@@ -6381,7 +6411,11 @@ impl Editor {
                 cx,
             );
         });
-        workspace.add_item(Box::new(editor), cx);
+        if split {
+            workspace.split_item(Box::new(editor), cx);
+        } else {
+            workspace.add_item(Box::new(editor), cx);
+        }
     }

     pub fn rename(&mut self, _: &Rename, cx: &mut ViewContext<Self>) -> Option<Task<Result<()>>> {
@@ -7222,6 +7256,47 @@ impl Editor {
         }
         results
     }
+    pub fn background_highlights_in_range_for<T: 'static>(
+        &self,
+        search_range: Range<Anchor>,
+        display_snapshot: &DisplaySnapshot,
+        theme: &Theme,
+    ) -> Vec<(Range<DisplayPoint>, Color)> {
+        let mut results = Vec::new();
+        let buffer = &display_snapshot.buffer_snapshot;
+        let Some((color_fetcher, ranges)) = self.background_highlights
+            .get(&TypeId::of::<T>()) else {
+                return vec![];
+            };
+
+        let color = color_fetcher(theme);
+        let start_ix = match ranges.binary_search_by(|probe| {
+            let cmp = probe.end.cmp(&search_range.start, buffer);
+            if cmp.is_gt() {
+                Ordering::Greater
+            } else {
+                Ordering::Less
+            }
+        }) {
+            Ok(i) | Err(i) => i,
+        };
+        for range in &ranges[start_ix..] {
+            if range.start.cmp(&search_range.end, buffer).is_ge() {
+                break;
+            }
+            let start = range
+                .start
+                .to_point(buffer)
+                .to_display_point(display_snapshot);
+            let end = range
+                .end
+                .to_point(buffer)
+                .to_display_point(display_snapshot);
+            results.push((start..end, color))
+        }

+        results
+    }

     pub fn highlight_text<T: 'static>(
         &mut self,
@@ -7524,7 +7599,7 @@ impl Editor {

     fn report_editor_event(
         &self,
-        name: &'static str,
+        operation: &'static str,
         file_extension: Option<String>,
         cx: &AppContext,
     ) {
@@ -7561,7 +7636,7 @@ impl Editor {
         let event = ClickhouseEvent::Editor {
             file_extension,
             vim_mode,
-            operation: name,
+            operation,
             copilot_enabled,
             copilot_enabled_for_language,
         };
@@ -8060,7 +8135,7 @@ fn build_style(
     cx: &AppContext,
 ) -> EditorStyle {
     let font_cache = cx.font_cache();
-
+    let line_height_scalar = settings.line_height();
     let theme_id = settings.theme.meta.id;
     let mut theme = settings.theme.editor.clone();
     let mut style = if let Some(get_field_editor_theme) = get_field_editor_theme {
@@ -8074,6 +8149,7 @@ fn build_style(
         EditorStyle {
             text: field_editor_theme.text,
             placeholder_text: field_editor_theme.placeholder_text,
+            line_height_scalar,
             theme,
             theme_id,
         }
@@ -8096,6 +8172,7 @@ fn build_style(
                 underline: Default::default(),
             },
             placeholder_text: None,
+            line_height_scalar,
             theme,
             theme_id,
         }
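The new `background_highlights_in_range_for::<T>` looks highlights up by `TypeId`: each highlight owner is identified by a marker type rather than a string key. A reduced, self-contained sketch of that registry pattern — the types below are simplified stand-ins for the editor's internals:

```rust
use std::any::TypeId;
use std::collections::HashMap;
use std::ops::Range;

type Color = u32;
type ColorFetcher = fn(&Theme) -> Color;

struct Theme {
    match_background: Color,
}

// Highlights are keyed by the TypeId of a marker type, so two features can
// never collide on a key and the owner is checked at compile time.
#[derive(Default)]
struct Highlights {
    by_owner: HashMap<TypeId, (ColorFetcher, Vec<Range<usize>>)>,
}

impl Highlights {
    fn insert<T: 'static>(&mut self, fetcher: ColorFetcher, ranges: Vec<Range<usize>>) {
        self.by_owner.insert(TypeId::of::<T>(), (fetcher, ranges));
    }

    fn ranges_for<T: 'static>(&self, theme: &Theme) -> Vec<(Range<usize>, Color)> {
        let Some((fetcher, ranges)) = self.by_owner.get(&TypeId::of::<T>()) else {
            return Vec::new();
        };
        let color = fetcher(theme);
        ranges.iter().map(|r| (r.clone(), color)).collect()
    }
}

// Marker type: "who" owns these highlights.
enum BufferSearchHighlights {}

fn main() {
    let mut highlights = Highlights::default();
    highlights.insert::<BufferSearchHighlights>(|t| t.match_background, vec![0..4, 10..14]);
    let theme = Theme { match_background: 0xFFEE00 };
    for (range, color) in highlights.ranges_for::<BufferSearchHighlights>(&theme) {
        println!("{range:?} -> #{color:06X}");
    }
}
```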
@ -22,7 +22,10 @@ use language::{
|
|||
BracketPairConfig, FakeLspAdapter, LanguageConfig, LanguageRegistry, Point,
|
||||
};
|
||||
use parking_lot::Mutex;
|
||||
use project::project_settings::{LspSettings, ProjectSettings};
|
||||
use project::FakeFs;
|
||||
use std::sync::atomic;
|
||||
use std::sync::atomic::AtomicUsize;
|
||||
use std::{cell::RefCell, future::Future, rc::Rc, time::Instant};
|
||||
use unindent::Unindent;
|
||||
use util::{
|
||||
|
@ -1796,7 +1799,7 @@ async fn test_newline_comments(cx: &mut gpui::TestAppContext) {
|
|||
"});
|
||||
}
|
||||
// Ensure that comment continuations can be disabled.
|
||||
update_test_settings(cx, |settings| {
|
||||
update_test_language_settings(cx, |settings| {
|
||||
settings.defaults.extend_comment_on_newline = Some(false);
|
||||
});
|
||||
let mut cx = EditorTestContext::new(cx).await;
|
||||
|
@ -3833,7 +3836,7 @@ async fn test_autoclose_with_embedded_language(cx: &mut gpui::TestAppContext) {
|
|||
autoclose_before: "})]>".into(),
|
||||
..Default::default()
|
||||
},
|
||||
Some(tree_sitter_javascript::language()),
|
||||
Some(tree_sitter_typescript::language_tsx()),
|
||||
));
|
||||
|
||||
let registry = Arc::new(LanguageRegistry::test());
|
||||
|
@ -4546,7 +4549,7 @@ async fn test_document_format_during_save(cx: &mut gpui::TestAppContext) {
|
|||
    assert!(!cx.read(|cx| editor.is_dirty(cx)));

    // Set rust language override and assert overridden tabsize is sent to language server
    update_test_settings(cx, |settings| {
    update_test_language_settings(cx, |settings| {
        settings.languages.insert(
            "Rust".into(),
            LanguageSettingsContent {

@@ -4660,7 +4663,7 @@ async fn test_range_format_during_save(cx: &mut gpui::TestAppContext) {
    assert!(!cx.read(|cx| editor.is_dirty(cx)));

    // Set rust language override and assert overridden tabsize is sent to language server
    update_test_settings(cx, |settings| {
    update_test_language_settings(cx, |settings| {
        settings.languages.insert(
            "Rust".into(),
            LanguageSettingsContent {

@@ -5380,7 +5383,7 @@ async fn test_toggle_block_comment(cx: &mut gpui::TestAppContext) {
            line_comment: Some("// ".into()),
            ..Default::default()
        },
        Some(tree_sitter_javascript::language()),
        Some(tree_sitter_typescript::language_tsx()),
    ));

    let registry = Arc::new(LanguageRegistry::test());

@@ -7084,6 +7087,233 @@ async fn test_on_type_formatting_not_triggered(cx: &mut gpui::TestAppContext) {
    });
}

#[gpui::test]
async fn test_language_server_restart_due_to_settings_change(cx: &mut gpui::TestAppContext) {
    init_test(cx, |_| {});

    let language_name: Arc<str> = "Rust".into();
    let mut language = Language::new(
        LanguageConfig {
            name: Arc::clone(&language_name),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );

    let server_restarts = Arc::new(AtomicUsize::new(0));
    let closure_restarts = Arc::clone(&server_restarts);
    let language_server_name = "test language server";
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: language_server_name,
            initialization_options: Some(json!({
                "testOptionValue": true
            })),
            initializer: Some(Box::new(move |fake_server| {
                let task_restarts = Arc::clone(&closure_restarts);
                fake_server.handle_request::<lsp::request::Shutdown, _, _>(move |_, _| {
                    task_restarts.fetch_add(1, atomic::Ordering::Release);
                    futures::future::ready(Ok(()))
                });
            })),
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/a",
        json!({
            "main.rs": "fn main() { let a = 5; }",
            "other.rs": "// Test file",
        }),
    )
    .await;
    let project = Project::test(fs, ["/a".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages().add(Arc::new(language)));
    let (_, _workspace) = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/a/main.rs", cx)
        })
        .await
        .unwrap();
    let _fake_server = fake_servers.next().await.unwrap();
    update_test_language_settings(cx, |language_settings| {
        language_settings.languages.insert(
            Arc::clone(&language_name),
            LanguageSettingsContent {
                tab_size: NonZeroU32::new(8),
                ..Default::default()
            },
        );
    });
    cx.foreground().run_until_parked();
    assert_eq!(
        server_restarts.load(atomic::Ordering::Acquire),
        0,
        "Should not restart LSP server on an unrelated change"
    );

    update_test_project_settings(cx, |project_settings| {
        project_settings.lsp.insert(
            "Some other server name".into(),
            LspSettings {
                initialization_options: Some(json!({
                    "some other init value": false
                })),
            },
        );
    });
    cx.foreground().run_until_parked();
    assert_eq!(
        server_restarts.load(atomic::Ordering::Acquire),
        0,
        "Should not restart LSP server on an unrelated LSP settings change"
    );

    update_test_project_settings(cx, |project_settings| {
        project_settings.lsp.insert(
            language_server_name.into(),
            LspSettings {
                initialization_options: Some(json!({
                    "anotherInitValue": false
                })),
            },
        );
    });
    cx.foreground().run_until_parked();
    assert_eq!(
        server_restarts.load(atomic::Ordering::Acquire),
        1,
        "Should restart LSP server on a related LSP settings change"
    );

    update_test_project_settings(cx, |project_settings| {
        project_settings.lsp.insert(
            language_server_name.into(),
            LspSettings {
                initialization_options: Some(json!({
                    "anotherInitValue": false
                })),
            },
        );
    });
    cx.foreground().run_until_parked();
    assert_eq!(
        server_restarts.load(atomic::Ordering::Acquire),
        1,
        "Should not restart LSP server on a related LSP settings change that is the same"
    );

    update_test_project_settings(cx, |project_settings| {
        project_settings.lsp.insert(
            language_server_name.into(),
            LspSettings {
                initialization_options: None,
            },
        );
    });
    cx.foreground().run_until_parked();
    assert_eq!(
        server_restarts.load(atomic::Ordering::Acquire),
        2,
        "Should restart LSP server on another related LSP settings change"
    );
}
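
Note: a quick sketch of the settings payload this test drives, assuming a serde-style `LspSettings` like the struct this diff adds; the top-level "lsp" key is an assumption inferred from how the test populates `project_settings.lsp`:

use serde::Deserialize;
use serde_json::json;
use std::collections::HashMap;

#[derive(Debug, Deserialize)]
struct LspSettings {
    initialization_options: Option<serde_json::Value>,
}

fn main() {
    // Hypothetical user-settings payload: a map from server name to its
    // per-server LSP settings, mirroring project_settings.lsp in the test.
    let user_settings = json!({
        "lsp": {
            "test language server": {
                "initialization_options": { "testOptionValue": true }
            }
        }
    });
    let lsp: HashMap<String, LspSettings> =
        serde_json::from_value(user_settings["lsp"].clone()).unwrap();
    assert!(lsp["test language server"].initialization_options.is_some());
}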

#[gpui::test]
async fn test_completions_with_additional_edits(cx: &mut gpui::TestAppContext) {
    init_test(cx, |_| {});

    let mut cx = EditorLspTestContext::new_rust(
        lsp::ServerCapabilities {
            completion_provider: Some(lsp::CompletionOptions {
                trigger_characters: Some(vec![".".to_string()]),
                ..Default::default()
            }),
            ..Default::default()
        },
        cx,
    )
    .await;

    cx.set_state(indoc! {"fn main() { let a = 2ˇ; }"});
    cx.simulate_keystroke(".");
    let completion_item = lsp::CompletionItem {
        label: "some".into(),
        kind: Some(lsp::CompletionItemKind::SNIPPET),
        detail: Some("Wrap the expression in an `Option::Some`".to_string()),
        documentation: Some(lsp::Documentation::MarkupContent(lsp::MarkupContent {
            kind: lsp::MarkupKind::Markdown,
            value: "```rust\nSome(2)\n```".to_string(),
        })),
        deprecated: Some(false),
        sort_text: Some("fffffff2".to_string()),
        filter_text: Some("some".to_string()),
        insert_text_format: Some(lsp::InsertTextFormat::SNIPPET),
        text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
            range: lsp::Range {
                start: lsp::Position {
                    line: 0,
                    character: 22,
                },
                end: lsp::Position {
                    line: 0,
                    character: 22,
                },
            },
            new_text: "Some(2)".to_string(),
        })),
        additional_text_edits: Some(vec![lsp::TextEdit {
            range: lsp::Range {
                start: lsp::Position {
                    line: 0,
                    character: 20,
                },
                end: lsp::Position {
                    line: 0,
                    character: 22,
                },
            },
            new_text: "".to_string(),
        }]),
        ..Default::default()
    };

    let closure_completion_item = completion_item.clone();
    let mut request = cx.handle_request::<lsp::request::Completion, _, _>(move |_, _, _| {
        let task_completion_item = closure_completion_item.clone();
        async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                task_completion_item,
            ])))
        }
    });

    request.next().await;

    cx.condition(|editor, _| editor.context_menu_visible())
        .await;
    let apply_additional_edits = cx.update_editor(|editor, cx| {
        editor
            .confirm_completion(&ConfirmCompletion::default(), cx)
            .unwrap()
    });
    cx.assert_editor_state(indoc! {"fn main() { let a = 2.Some(2)ˇ; }"});

    cx.handle_request::<lsp::request::ResolveCompletionItem, _, _>(move |_, _, _| {
        let task_completion_item = completion_item.clone();
        async move { Ok(task_completion_item) }
    })
    .next()
    .await
    .unwrap();
    apply_additional_edits.await.unwrap();
    cx.assert_editor_state(indoc! {"fn main() { let a = Some(2)ˇ; }"});
}
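
Note: the offsets in this test line up exactly: confirming the completion inserts the primary text_edit at character 22, and the resolved additional_text_edits then delete characters 20..22 (the original "2."). A standalone sketch of that arithmetic, applying edits back-to-front so earlier offsets stay valid (a common convention, not the editor's actual implementation):

// Apply (start, end, replacement) edits to a single-line string,
// back-to-front so earlier character offsets remain valid.
fn apply_edits(text: &str, mut edits: Vec<(usize, usize, &str)>) -> String {
    edits.sort_by_key(|(start, ..)| std::cmp::Reverse(*start));
    let mut out = text.to_string();
    for (start, end, replacement) in edits {
        out.replace_range(start..end, replacement);
    }
    out
}

fn main() {
    // State after typing "." with the cursor at character 22.
    let after_confirm = apply_edits("fn main() { let a = 2.; }", vec![(22, 22, "Some(2)")]);
    assert_eq!(after_confirm, "fn main() { let a = 2.Some(2); }");
    // The resolved additional edit removes the original "2." prefix.
    let after_resolve = apply_edits(&after_confirm, vec![(20, 22, "")]);
    assert_eq!(after_resolve, "fn main() { let a = Some(2); }");
}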

fn empty_range(row: usize, column: usize) -> Range<DisplayPoint> {
    let point = DisplayPoint::new(row as u32, column as u32);
    point..point

@@ -7203,7 +7433,7 @@ fn handle_copilot_completion_request(
    });
}

pub(crate) fn update_test_settings(
pub(crate) fn update_test_language_settings(
    cx: &mut TestAppContext,
    f: impl Fn(&mut AllLanguageSettingsContent),
) {

@@ -7214,6 +7444,17 @@ pub(crate) fn update_test_settings(
    });
}

pub(crate) fn update_test_project_settings(
    cx: &mut TestAppContext,
    f: impl Fn(&mut ProjectSettings),
) {
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _, _>(|store, cx| {
            store.update_user_settings::<ProjectSettings>(cx, f);
        });
    });
}

pub(crate) fn init_test(cx: &mut TestAppContext, f: fn(&mut AllLanguageSettingsContent)) {
    cx.foreground().forbid_parking();

@@ -7227,5 +7468,5 @@ pub(crate) fn init_test(cx: &mut TestAppContext, f: fn(&mut AllLanguageSettingsC
        crate::init(cx);
    });

    update_test_settings(cx, f);
    update_test_language_settings(cx, f);
}

@@ -156,6 +156,7 @@ impl EditorElement {
                event.position,
                event.cmd,
                event.shift,
                event.alt,
                position_map.as_ref(),
                text_bounds,
                cx,

@@ -308,6 +309,7 @@ impl EditorElement {
        position: Vector2F,
        cmd: bool,
        shift: bool,
        alt: bool,
        position_map: &PositionMap,
        text_bounds: RectF,
        cx: &mut EventContext<Editor>,

@@ -324,9 +326,9 @@ impl EditorElement {
            if point == target_point {
                if shift {
                    go_to_fetched_type_definition(editor, point, cx);
                    go_to_fetched_type_definition(editor, point, alt, cx);
                } else {
                    go_to_fetched_definition(editor, point, cx);
                    go_to_fetched_definition(editor, point, alt, cx);
                }

                return true;

@@ -1086,11 +1088,13 @@ impl EditorElement {
                })
            }
        };
        for (row, _) in &editor.background_highlights_in_range(
            start_anchor..end_anchor,
            &layout.position_map.snapshot,
            &theme,
        ) {
        for (row, _) in &editor
            .background_highlights_in_range_for::<crate::items::BufferSearchHighlights>(
                start_anchor..end_anchor,
                &layout.position_map.snapshot,
                &theme,
            )
        {
            let start_display = row.start;
            let end_display = row.end;

@@ -1180,8 +1184,10 @@ impl EditorElement {
        });
        scene.push_mouse_region(
            MouseRegion::new::<ScrollbarMouseHandlers>(cx.view_id(), cx.view_id(), track_bounds)
                .on_move(move |_, editor: &mut Editor, cx| {
                    editor.scroll_manager.show_scrollbar(cx);
                .on_move(move |event, editor: &mut Editor, cx| {
                    if event.pressed_button.is_none() {
                        editor.scroll_manager.show_scrollbar(cx);
                    }
                })
                .on_down(MouseButton::Left, {
                    let row_range = row_range.clone();

@@ -1971,7 +1977,7 @@ impl Element<Editor> for EditorElement {
        let snapshot = editor.snapshot(cx);
        let style = self.style.clone();
        let line_height = style.text.line_height(cx.font_cache());
        let line_height = (style.text.font_size * style.line_height_scalar).round();

        let gutter_padding;
        let gutter_width;

@@ -2149,6 +2155,9 @@ impl Element<Editor> for EditorElement {
            ShowScrollbar::Auto => {
                // Git
                (is_singleton && scrollbar_settings.git_diff && snapshot.buffer_snapshot.has_git_diffs())
                ||
                // Selections
                (is_singleton && scrollbar_settings.selections && !highlighted_ranges.is_empty())
                // Scrollmanager
                || editor.scroll_manager.scrollbars_visible()
            }

@@ -2911,7 +2920,7 @@ mod tests {
    use super::*;
    use crate::{
        display_map::{BlockDisposition, BlockProperties},
        editor_tests::{init_test, update_test_settings},
        editor_tests::{init_test, update_test_language_settings},
        Editor, MultiBuffer,
    };
    use gpui::TestAppContext;

@@ -3108,7 +3117,7 @@ mod tests {
    let resize_step = 10.0;
    let mut editor_width = 200.0;
    while editor_width <= 1000.0 {
        update_test_settings(cx, |s| {
        update_test_language_settings(cx, |s| {
            s.defaults.tab_size = NonZeroU32::new(tab_size);
            s.defaults.show_whitespaces = Some(ShowWhitespaceSetting::All);
            s.defaults.preferred_line_length = Some(editor_width as u32);

@@ -847,7 +847,7 @@ mod tests {
    use text::Point;
    use workspace::Workspace;

    use crate::editor_tests::update_test_settings;
    use crate::editor_tests::update_test_language_settings;

    use super::*;

@@ -1476,7 +1476,7 @@ mod tests {
        ),
    ] {
        edits_made += 1;
        update_test_settings(cx, |settings| {
        update_test_language_settings(cx, |settings| {
            settings.defaults.inlay_hints = Some(InlayHintSettings {
                enabled: true,
                show_type_hints: new_allowed_hint_kinds.contains(&Some(InlayHintKind::Type)),

@@ -1520,7 +1520,7 @@ mod tests {

        edits_made += 1;
        let another_allowed_hint_kinds = HashSet::from_iter([Some(InlayHintKind::Type)]);
        update_test_settings(cx, |settings| {
        update_test_language_settings(cx, |settings| {
            settings.defaults.inlay_hints = Some(InlayHintSettings {
                enabled: false,
                show_type_hints: another_allowed_hint_kinds.contains(&Some(InlayHintKind::Type)),

@@ -1577,7 +1577,7 @@ mod tests {

        let final_allowed_hint_kinds = HashSet::from_iter([Some(InlayHintKind::Parameter)]);
        edits_made += 1;
        update_test_settings(cx, |settings| {
        update_test_language_settings(cx, |settings| {
            settings.defaults.inlay_hints = Some(InlayHintSettings {
                enabled: true,
                show_type_hints: final_allowed_hint_kinds.contains(&Some(InlayHintKind::Type)),

@@ -2269,7 +2269,7 @@ unedited (2nd) buffer should have the same hint");
        crate::init(cx);
    });

    update_test_settings(cx, f);
    update_test_language_settings(cx, f);
}

async fn prepare_test_objects(

@@ -883,14 +883,24 @@ impl ProjectItem for Editor {
    }
}

enum BufferSearchHighlights {}
pub(crate) enum BufferSearchHighlights {}
impl SearchableItem for Editor {
    type Match = Range<Anchor>;

    fn to_search_event(event: &Self::Event) -> Option<SearchEvent> {
    fn to_search_event(
        &mut self,
        event: &Self::Event,
        _: &mut ViewContext<Self>,
    ) -> Option<SearchEvent> {
        match event {
            Event::BufferEdited => Some(SearchEvent::MatchesInvalidated),
            Event::SelectionsChanged { .. } => Some(SearchEvent::ActiveMatchChanged),
            Event::SelectionsChanged { .. } => {
                if self.selections.disjoint_anchors().len() == 1 {
                    Some(SearchEvent::ActiveMatchChanged)
                } else {
                    None
                }
            }
            _ => None,
        }
    }

@@ -941,6 +951,11 @@ impl SearchableItem for Editor {
        });
    }

    fn select_matches(&mut self, matches: Vec<Self::Match>, cx: &mut ViewContext<Self>) {
        self.unfold_ranges(matches.clone(), false, false, cx);
        self.change_selections(None, cx, |s| s.select_ranges(matches));
    }

    fn match_index_for_direction(
        &mut self,
        matches: &Vec<Range<Anchor>>,

@@ -949,8 +964,16 @@ impl SearchableItem for Editor {
        cx: &mut ViewContext<Self>,
    ) -> usize {
        let buffer = self.buffer().read(cx).snapshot(cx);
        let cursor = self.selections.newest_anchor().head();
        if matches[current_index].start.cmp(&cursor, &buffer).is_gt() {
        let current_index_position = if self.selections.disjoint_anchors().len() == 1 {
            self.selections.newest_anchor().head()
        } else {
            matches[current_index].start
        };
        if matches[current_index]
            .start
            .cmp(&current_index_position, &buffer)
            .is_gt()
        {
            if direction == Direction::Prev {
                if current_index == 0 {
                    current_index = matches.len() - 1;

@@ -958,7 +981,11 @@ impl SearchableItem for Editor {
                    current_index -= 1;
                }
            }
        } else if matches[current_index].end.cmp(&cursor, &buffer).is_lt() {
        } else if matches[current_index]
            .end
            .cmp(&current_index_position, &buffer)
            .is_lt()
        {
            if direction == Direction::Next {
                current_index = 0;
            }

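Note: the reworked match_index_for_direction keys off a reference position (the newest cursor head, or the current match when there is a multi-selection) and wraps at either end. A reduced sketch of the two wrap-around branches visible in the hunk, over plain offset ranges; the in-range case falls in the elided part of the hunk and is left as a pass-through here:

#[derive(PartialEq)]
enum Direction {
    Prev,
    Next,
}

// Given sorted, non-overlapping matches and a reference position, step the
// current index, wrapping when the position lies before/after the match.
fn match_index_for_direction(
    matches: &[std::ops::Range<usize>],
    mut current_index: usize,
    position: usize,
    direction: Direction,
) -> usize {
    if matches[current_index].start > position {
        if direction == Direction::Prev {
            current_index = if current_index == 0 {
                matches.len() - 1
            } else {
                current_index - 1
            };
        }
    } else if matches[current_index].end < position {
        if direction == Direction::Next {
            current_index = 0;
        }
    }
    // The case where `position` falls inside the current match is elided
    // in the hunk above; keep the index unchanged in this sketch.
    current_index
}

fn main() {
    let matches = [2..4, 10..12, 20..22];
    // Cursor past the last match: stepping Next wraps to the first.
    assert_eq!(match_index_for_direction(&matches, 2, 30, Direction::Next), 0);
    // Cursor before the first match: stepping Prev wraps to the last.
    assert_eq!(match_index_for_direction(&matches, 0, 0, Direction::Prev), 2);
}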
@@ -246,23 +246,26 @@ pub fn hide_link_definition(editor: &mut Editor, cx: &mut ViewContext<Editor>) {
pub fn go_to_fetched_definition(
    editor: &mut Editor,
    point: DisplayPoint,
    split: bool,
    cx: &mut ViewContext<Editor>,
) {
    go_to_fetched_definition_of_kind(LinkDefinitionKind::Symbol, editor, point, cx);
    go_to_fetched_definition_of_kind(LinkDefinitionKind::Symbol, editor, point, split, cx);
}

pub fn go_to_fetched_type_definition(
    editor: &mut Editor,
    point: DisplayPoint,
    split: bool,
    cx: &mut ViewContext<Editor>,
) {
    go_to_fetched_definition_of_kind(LinkDefinitionKind::Type, editor, point, cx);
    go_to_fetched_definition_of_kind(LinkDefinitionKind::Type, editor, point, split, cx);
}

fn go_to_fetched_definition_of_kind(
    kind: LinkDefinitionKind,
    editor: &mut Editor,
    point: DisplayPoint,
    split: bool,
    cx: &mut ViewContext<Editor>,
) {
    let cached_definitions = editor.link_go_to_definition_state.definitions.clone();

@@ -275,7 +278,7 @@ fn go_to_fetched_definition_of_kind(
        cx.focus_self();
    }

    editor.navigate_to_definitions(cached_definitions, cx);
    editor.navigate_to_definitions(cached_definitions, split, cx);
} else {
    editor.select(
        SelectPhase::Begin {

@@ -403,7 +406,7 @@ mod tests {
    });

    cx.update_editor(|editor, cx| {
        go_to_fetched_type_definition(editor, hover_point, cx);
        go_to_fetched_type_definition(editor, hover_point, false, cx);
    });
    requests.next().await;
    cx.foreground().run_until_parked();

@@ -614,7 +617,7 @@ mod tests {

    // Cmd click with existing definition doesn't re-request and dismisses highlight
    cx.update_editor(|editor, cx| {
        go_to_fetched_definition(editor, hover_point, cx);
        go_to_fetched_definition(editor, hover_point, false, cx);
    });
    // Assert selection moved to the definition
    cx.lsp

@@ -655,7 +658,7 @@ mod tests {
    ])))
    });
    cx.update_editor(|editor, cx| {
        go_to_fetched_definition(editor, hover_point, cx);
        go_to_fetched_definition(editor, hover_point, false, cx);
    });
    requests.next().await;
    cx.foreground().run_until_parked();

@@ -16,13 +16,13 @@ use crate::{
    Anchor, DisplayPoint, ExcerptId, MultiBuffer, MultiBufferSnapshot, SelectMode, ToOffset,
};

#[derive(Clone)]
#[derive(Debug, Clone)]
pub struct PendingSelection {
    pub selection: Selection<Anchor>,
    pub mode: SelectMode,
}

#[derive(Clone)]
#[derive(Debug, Clone)]
pub struct SelectionsCollection {
    display_map: ModelHandle<DisplayMap>,
    buffer: ModelHandle<MultiBuffer>,

@@ -362,8 +362,13 @@ impl Item for FeedbackEditor {
impl SearchableItem for FeedbackEditor {
    type Match = Range<Anchor>;

    fn to_search_event(event: &Self::Event) -> Option<workspace::searchable::SearchEvent> {
        Editor::to_search_event(event)
    fn to_search_event(
        &mut self,
        event: &Self::Event,
        cx: &mut ViewContext<Self>,
    ) -> Option<workspace::searchable::SearchEvent> {
        self.editor
            .update(cx, |editor, cx| editor.to_search_event(event, cx))
    }

    fn clear_matches(&mut self, cx: &mut ViewContext<Self>) {

@@ -391,6 +396,11 @@ impl SearchableItem for FeedbackEditor {
        .update(cx, |editor, cx| editor.activate_match(index, matches, cx))
    }

    fn select_matches(&mut self, matches: Vec<Self::Match>, cx: &mut ViewContext<Self>) {
        self.editor
            .update(cx, |e, cx| e.select_matches(matches, cx))
    }

    fn find_matches(
        &mut self,
        query: project::search::SearchQuery,

@@ -442,53 +442,71 @@ impl PickerDelegate for FileFinderDelegate {
    }
}

fn confirm(&mut self, cx: &mut ViewContext<FileFinder>) {
fn confirm(&mut self, secondary: bool, cx: &mut ViewContext<FileFinder>) {
    if let Some(m) = self.matches.get(self.selected_index()) {
        if let Some(workspace) = self.workspace.upgrade(cx) {
            let open_task = workspace.update(cx, |workspace, cx| match m {
                Match::History(history_match) => {
                    let worktree_id = history_match.project.worktree_id;
                    if workspace
                        .project()
                        .read(cx)
                        .worktree_for_id(worktree_id, cx)
                        .is_some()
                    {
                        workspace.open_path(
                            ProjectPath {
                                worktree_id,
                                path: Arc::clone(&history_match.project.path),
                            },
                            None,
                            true,
                            cx,
                        )
            let open_task = workspace.update(cx, move |workspace, cx| {
                let split_or_open = |workspace: &mut Workspace, project_path, cx| {
                    if secondary {
                        workspace.split_path(project_path, cx)
                    } else {
                        match history_match.absolute.as_ref() {
                            Some(abs_path) => {
                                workspace.open_abs_path(abs_path.to_path_buf(), false, cx)
                            }
                            None => workspace.open_path(
                        workspace.open_path(project_path, None, true, cx)
                    }
                };
                match m {
                    Match::History(history_match) => {
                        let worktree_id = history_match.project.worktree_id;
                        if workspace
                            .project()
                            .read(cx)
                            .worktree_for_id(worktree_id, cx)
                            .is_some()
                        {
                            split_or_open(
                                workspace,
                                ProjectPath {
                                    worktree_id,
                                    path: Arc::clone(&history_match.project.path),
                                },
                                None,
                                true,
                                cx,
                            ),
                            )
                        } else {
                            match history_match.absolute.as_ref() {
                                Some(abs_path) => {
                                    if secondary {
                                        workspace.split_abs_path(
                                            abs_path.to_path_buf(),
                                            false,
                                            cx,
                                        )
                                    } else {
                                        workspace.open_abs_path(
                                            abs_path.to_path_buf(),
                                            false,
                                            cx,
                                        )
                                    }
                                }
                                None => split_or_open(
                                    workspace,
                                    ProjectPath {
                                        worktree_id,
                                        path: Arc::clone(&history_match.project.path),
                                    },
                                    cx,
                                ),
                            }
                        }
                    }
                    Match::Search(m) => split_or_open(
                        workspace,
                        ProjectPath {
                            worktree_id: WorktreeId::from_usize(m.worktree_id),
                            path: m.path.clone(),
                        },
                        cx,
                    ),
                }
                Match::Search(m) => workspace.open_path(
                    ProjectPath {
                        worktree_id: WorktreeId::from_usize(m.worktree_id),
                        path: m.path.clone(),
                    },
                    None,
                    true,
                    cx,
                ),
            });

            let row = self

@@ -33,12 +33,16 @@ pub trait GitRepository: Send {
    fn statuses(&self) -> Option<TreeMap<RepoPath, GitFileStatus>>;

    fn status(&self, path: &RepoPath) -> Result<Option<GitFileStatus>>;

    fn branches(&self) -> Result<Vec<Branch>> {
        Ok(vec![])
    }
    fn change_branch(&self, _: &str) -> Result<()> {
        Ok(())
    }
    fn create_branch(&self, _: &str) -> Result<()> {
        Ok(())
    }
}

impl std::fmt::Debug for dyn GitRepository {

@@ -152,6 +156,12 @@ impl GitRepository for LibGitRepository {
        )?;
        Ok(())
    }
    fn create_branch(&self, name: &str) -> Result<()> {
        let current_commit = self.head()?.peel_to_commit()?;
        self.branch(name, &current_commit, false)?;

        Ok(())
    }
}

fn read_status(status: git2::Status) -> Option<GitFileStatus> {

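Note: the LibGitRepository implementation above maps directly onto the git2 crate, where Repository::branch points a new branch at a commit. A hedged, standalone sketch of the same flow (the repository path is illustrative):

use git2::Repository;

fn create_branch(repo: &Repository, name: &str) -> Result<(), git2::Error> {
    // Resolve HEAD to a commit and point the new branch at it, matching
    // the create_branch implementation in this diff.
    let current_commit = repo.head()?.peel_to_commit()?;
    repo.branch(name, &current_commit, false)?;
    Ok(())
}

fn main() -> Result<(), git2::Error> {
    let repo = Repository::open(".")?; // illustrative path
    create_branch(&repo, "feature/example")?;
    Ok(())
}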
@@ -1268,6 +1268,19 @@ impl Vector2FExt for Vector2F {
    }
}

pub trait RectFExt {
    fn length_along(self, axis: Axis) -> f32;
}

impl RectFExt for RectF {
    fn length_along(self, axis: Axis) -> f32 {
        match axis {
            Axis::Horizontal => self.width(),
            Axis::Vertical => self.height(),
        }
    }
}

#[derive(Copy, Clone, Debug)]
pub struct SizeConstraint {
    pub min: Vector2F,

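Note: a quick usage sketch of the new RectFExt helper; the import paths and the RectF::new(origin, size) constructor are assumptions based on gpui's pathfinder-backed geometry types:

use gpui::geometry::{rect::RectF, vector::Vector2F};
use gpui::{Axis, RectFExt};

fn main() {
    // Pick a rect's extent along a layout axis without matching on Axis
    // at every call site.
    let bounds = RectF::new(Vector2F::new(0.0, 0.0), Vector2F::new(800.0, 600.0));
    assert_eq!(bounds.length_along(Axis::Horizontal), 800.0);
    assert_eq!(bounds.length_along(Axis::Vertical), 600.0);
}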
@@ -27,7 +27,7 @@ pub mod json;
pub mod keymap_matcher;
pub mod platform;
pub use gpui_macros::{test, Element};
pub use window::{Axis, SizeConstraint, Vector2FExt, WindowContext};
pub use window::{Axis, RectFExt, SizeConstraint, Vector2FExt, WindowContext};

pub use anyhow;
pub use serde_json;

@@ -46,7 +46,6 @@ lazy_static.workspace = true
log.workspace = true
parking_lot.workspace = true
postage.workspace = true
rand = { workspace = true, optional = true }
regex.workspace = true
schemars.workspace = true
serde.workspace = true

@@ -56,10 +55,12 @@ similar = "1.3"
smallvec.workspace = true
smol.workspace = true
tree-sitter.workspace = true
tree-sitter-rust = { version = "*", optional = true }
tree-sitter-typescript = { version = "*", optional = true }
unicase = "2.6"

rand = { workspace = true, optional = true }
tree-sitter-rust = { workspace = true, optional = true }
tree-sitter-typescript = { workspace = true, optional = true }

[dev-dependencies]
client = { path = "../client", features = ["test-support"] }
collections = { path = "../collections", features = ["test-support"] }

@@ -74,12 +75,13 @@ indoc.workspace = true
rand.workspace = true
unindent.workspace = true

tree-sitter-embedded-template = "*"
tree-sitter-html = "*"
tree-sitter-javascript = "*"
tree-sitter-json = "*"
tree-sitter-markdown = { git = "https://github.com/MDeiml/tree-sitter-markdown", rev = "330ecab87a3e3a7211ac69bbadc19eabecdb1cca" }
tree-sitter-rust = "*"
tree-sitter-python = "*"
tree-sitter-typescript = "*"
tree-sitter-ruby = "*"
tree-sitter-embedded-template.workspace = true
tree-sitter-html.workspace = true
tree-sitter-json.workspace = true
tree-sitter-markdown.workspace = true
tree-sitter-rust.workspace = true
tree-sitter-python.workspace = true
tree-sitter-typescript.workspace = true
tree-sitter-ruby.workspace = true
tree-sitter-elixir.workspace = true
tree-sitter-heex.workspace = true

@@ -2145,23 +2145,27 @@ impl BufferSnapshot {

    pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
        let offset = position.to_offset(self);
        let mut range = 0..self.len();
        let mut scope = self.language.clone().map(|language| LanguageScope {
            language,
            override_id: None,
        });

        if let Some(layer_info) = self
            .syntax
            .layers_for_range(offset..offset, &self.text)
            .filter(|l| l.node().end_byte() > offset)
            .last()
        {
            Some(LanguageScope {
                language: layer_info.language.clone(),
                override_id: layer_info.override_id(offset, &self.text),
            })
        } else {
            self.language.clone().map(|language| LanguageScope {
                language,
                override_id: None,
            })
        // Use the layer that has the smallest node intersecting the given point.
        for layer in self.syntax.layers_for_range(offset..offset, &self.text) {
            let mut cursor = layer.node().walk();
            while cursor.goto_first_child_for_byte(offset).is_some() {}
            let node_range = cursor.node().byte_range();
            if node_range.to_inclusive().contains(&offset) && node_range.len() < range.len() {
                range = node_range;
                scope = Some(LanguageScope {
                    language: layer.language.clone(),
                    override_id: layer.override_id(offset, &self.text),
                });
            }
        }

        scope
    }

    pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {

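Note: the rewritten lookup walks every syntax layer at the offset and keeps the scope whose innermost node is smallest, so a nested injection (Ruby inside ERB, say) wins over the outer document layer. A minimal sketch of that selection rule, with layers and nodes reduced to (name, range) pairs for illustration:

use std::ops::Range;

// Reduced model: each layer contributes the byte range of its innermost
// node containing `offset`; the smallest containing range wins.
fn scope_at<'a>(layers: &'a [(&'a str, Range<usize>)], offset: usize) -> Option<&'a str> {
    let mut best: Option<(&str, usize)> = None;
    for (name, range) in layers.iter() {
        let contains = range.start <= offset && offset <= range.end;
        if contains && best.map_or(true, |(_, len)| range.len() < len) {
            best = Some((*name, range.len()));
        }
    }
    best.map(|(name, _)| name)
}

fn main() {
    // An ERB document: the HTML layer spans everything, the Ruby layer
    // only the directive, so inside the directive Ruby is chosen.
    let layers = [("HTML", 0..100), ("Ruby", 40..60)];
    assert_eq!(scope_at(&layers, 50), Some("Ruby"));
    assert_eq!(scope_at(&layers, 10), Some("HTML"));
}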
@@ -1533,47 +1533,9 @@ fn test_autoindent_with_injected_languages(cx: &mut AppContext) {
        ])
    });

    let html_language = Arc::new(
        Language::new(
            LanguageConfig {
                name: "HTML".into(),
                ..Default::default()
            },
            Some(tree_sitter_html::language()),
        )
        .with_indents_query(
            "
            (element
                (start_tag) @start
                (end_tag)? @end) @indent
            ",
        )
        .unwrap()
        .with_injection_query(
            r#"
            (script_element
                (raw_text) @content
                (#set! "language" "javascript"))
            "#,
        )
        .unwrap(),
    );
    let html_language = Arc::new(html_lang());

    let javascript_language = Arc::new(
        Language::new(
            LanguageConfig {
                name: "JavaScript".into(),
                ..Default::default()
            },
            Some(tree_sitter_javascript::language()),
        )
        .with_indents_query(
            r#"
            (object "}" @end) @indent
            "#,
        )
        .unwrap(),
    );
    let javascript_language = Arc::new(javascript_lang());

    let language_registry = Arc::new(LanguageRegistry::test());
    language_registry.add(html_language.clone());

@@ -1669,7 +1631,7 @@ fn test_autoindent_query_with_outdent_captures(cx: &mut AppContext) {
}

#[gpui::test]
fn test_language_config_at(cx: &mut AppContext) {
fn test_language_scope_at(cx: &mut AppContext) {
    init_settings(cx, |_| {});

    cx.add_model(|cx| {

@@ -1709,7 +1671,7 @@ fn test_language_config_at(cx: &mut AppContext) {
            .collect(),
        ..Default::default()
        },
        Some(tree_sitter_javascript::language()),
        Some(tree_sitter_typescript::language_tsx()),
    )
    .with_override_query(
        r#"

@@ -1756,6 +1718,54 @@ fn test_language_config_at(cx: &mut AppContext) {
    });
}

#[gpui::test]
fn test_language_scope_at_with_combined_injections(cx: &mut AppContext) {
    init_settings(cx, |_| {});

    cx.add_model(|cx| {
        let text = r#"
            <ol>
                <% people.each do |person| %>
                    <li>
                        <%= person.name %>
                    </li>
                <% end %>
            </ol>
        "#
        .unindent();

        let language_registry = Arc::new(LanguageRegistry::test());
        language_registry.add(Arc::new(ruby_lang()));
        language_registry.add(Arc::new(html_lang()));
        language_registry.add(Arc::new(erb_lang()));

        let mut buffer = Buffer::new(0, text, cx);
        buffer.set_language_registry(language_registry.clone());
        buffer.set_language(
            language_registry
                .language_for_name("ERB")
                .now_or_never()
                .unwrap()
                .ok(),
            cx,
        );

        let snapshot = buffer.snapshot();
        let html_config = snapshot.language_scope_at(Point::new(2, 4)).unwrap();
        assert_eq!(html_config.line_comment_prefix(), None);
        assert_eq!(
            html_config.block_comment_delimiters(),
            Some((&"<!--".into(), &"-->".into()))
        );

        let ruby_config = snapshot.language_scope_at(Point::new(3, 12)).unwrap();
        assert_eq!(ruby_config.line_comment_prefix().unwrap().as_ref(), "# ");
        assert_eq!(ruby_config.block_comment_delimiters(), None);

        buffer
    });
}

#[gpui::test]
fn test_serialization(cx: &mut gpui::AppContext) {
    let mut now = Instant::now();

@@ -2143,6 +2153,7 @@ fn ruby_lang() -> Language {
        LanguageConfig {
            name: "Ruby".into(),
            path_suffixes: vec!["rb".to_string()],
            line_comment: Some("# ".into()),
            ..Default::default()
        },
        Some(tree_sitter_ruby::language()),

@@ -2158,6 +2169,61 @@ fn ruby_lang() -> Language {
    .unwrap()
}

fn html_lang() -> Language {
    Language::new(
        LanguageConfig {
            name: "HTML".into(),
            block_comment: Some(("<!--".into(), "-->".into())),
            ..Default::default()
        },
        Some(tree_sitter_html::language()),
    )
    .with_indents_query(
        "
        (element
            (start_tag) @start
            (end_tag)? @end) @indent
        ",
    )
    .unwrap()
    .with_injection_query(
        r#"
        (script_element
            (raw_text) @content
            (#set! "language" "javascript"))
        "#,
    )
    .unwrap()
}

fn erb_lang() -> Language {
    Language::new(
        LanguageConfig {
            name: "ERB".into(),
            path_suffixes: vec!["erb".to_string()],
            block_comment: Some(("<%#".into(), "%>".into())),
            ..Default::default()
        },
        Some(tree_sitter_embedded_template::language()),
    )
    .with_injection_query(
        r#"
        (
            (code) @content
            (#set! "language" "ruby")
            (#set! "combined")
        )

        (
            (content) @content
            (#set! "language" "html")
            (#set! "combined")
        )
        "#,
    )
    .unwrap()
}

fn rust_lang() -> Language {
    Language::new(
        LanguageConfig {

@@ -2227,7 +2293,7 @@ fn javascript_lang() -> Language {
            name: "JavaScript".into(),
            ..Default::default()
        },
        Some(tree_sitter_javascript::language()),
        Some(tree_sitter_typescript::language_tsx()),
    )
    .with_brackets_query(
        r#"

@@ -2236,6 +2302,12 @@ fn javascript_lang() -> Language {
        "#,
    )
    .unwrap()
    .with_indents_query(
        r#"
        (object "}" @end) @indent
        "#,
    )
    .unwrap()
}

fn get_tree_sexp(buffer: &ModelHandle<Buffer>, cx: &gpui::TestAppContext) -> String {

@@ -357,6 +357,7 @@ pub struct LanguageQueries {
    pub brackets: Option<Cow<'static, str>>,
    pub indents: Option<Cow<'static, str>>,
    pub outline: Option<Cow<'static, str>>,
    pub embedding: Option<Cow<'static, str>>,
    pub injections: Option<Cow<'static, str>>,
    pub overrides: Option<Cow<'static, str>>,
}

@@ -434,6 +435,7 @@ fn deserialize_regex<'de, D: Deserializer<'de>>(d: D) -> Result<Option<Regex>, D
#[cfg(any(test, feature = "test-support"))]
pub struct FakeLspAdapter {
    pub name: &'static str,
    pub initialization_options: Option<Value>,
    pub capabilities: lsp::ServerCapabilities,
    pub initializer: Option<Box<dyn 'static + Send + Sync + Fn(&mut lsp::FakeLanguageServer)>>,
    pub disk_based_diagnostics_progress_token: Option<String>,

@@ -496,12 +498,13 @@ pub struct Language {

pub struct Grammar {
    id: usize,
    pub(crate) ts_language: tree_sitter::Language,
    pub ts_language: tree_sitter::Language,
    pub(crate) error_query: Query,
    pub(crate) highlights_query: Option<Query>,
    pub(crate) brackets_config: Option<BracketConfig>,
    pub(crate) indents_config: Option<IndentConfig>,
    pub(crate) outline_config: Option<OutlineConfig>,
    pub outline_config: Option<OutlineConfig>,
    pub embedding_config: Option<EmbeddingConfig>,
    pub(crate) injection_config: Option<InjectionConfig>,
    pub(crate) override_config: Option<OverrideConfig>,
    pub(crate) highlight_map: Mutex<HighlightMap>,

@@ -515,12 +518,21 @@ struct IndentConfig {
    outdent_capture_ix: Option<u32>,
}

struct OutlineConfig {
    query: Query,
    item_capture_ix: u32,
    name_capture_ix: u32,
    context_capture_ix: Option<u32>,
    extra_context_capture_ix: Option<u32>,
pub struct OutlineConfig {
    pub query: Query,
    pub item_capture_ix: u32,
    pub name_capture_ix: u32,
    pub context_capture_ix: Option<u32>,
    pub extra_context_capture_ix: Option<u32>,
}

#[derive(Debug)]
pub struct EmbeddingConfig {
    pub query: Query,
    pub item_capture_ix: u32,
    pub name_capture_ix: u32,
    pub context_capture_ix: Option<u32>,
    pub extra_context_capture_ix: Option<u32>,
}

struct InjectionConfig {

@@ -1156,6 +1168,7 @@ impl Language {
    highlights_query: None,
    brackets_config: None,
    outline_config: None,
    embedding_config: None,
    indents_config: None,
    injection_config: None,
    override_config: None,

@@ -1192,6 +1205,9 @@ impl Language {
    if let Some(query) = queries.outline {
        self = self.with_outline_query(query.as_ref())?;
    }
    if let Some(query) = queries.embedding {
        self = self.with_embedding_query(query.as_ref())?;
    }
    if let Some(query) = queries.injections {
        self = self.with_injection_query(query.as_ref())?;
    }

@@ -1200,6 +1216,7 @@ impl Language {
    }
    Ok(self)
}

pub fn with_highlights_query(mut self, source: &str) -> Result<Self> {
    let grammar = self.grammar_mut();
    grammar.highlights_query = Some(Query::new(grammar.ts_language, source)?);

@@ -1234,6 +1251,34 @@ impl Language {
    Ok(self)
}

pub fn with_embedding_query(mut self, source: &str) -> Result<Self> {
    let grammar = self.grammar_mut();
    let query = Query::new(grammar.ts_language, source)?;
    let mut item_capture_ix = None;
    let mut name_capture_ix = None;
    let mut context_capture_ix = None;
    let mut extra_context_capture_ix = None;
    get_capture_indices(
        &query,
        &mut [
            ("item", &mut item_capture_ix),
            ("name", &mut name_capture_ix),
            ("context", &mut context_capture_ix),
            ("context.extra", &mut extra_context_capture_ix),
        ],
    );
    if let Some((item_capture_ix, name_capture_ix)) = item_capture_ix.zip(name_capture_ix) {
        grammar.embedding_config = Some(EmbeddingConfig {
            query,
            item_capture_ix,
            name_capture_ix,
            context_capture_ix,
            extra_context_capture_ix,
        });
    }
    Ok(self)
}
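
Note: the capture names that with_embedding_query looks for are fixed above ("item", "name", "context", "context.extra"). A hedged example of a query a language could register, using the Rust grammar; the query text is illustrative, not the one shipped in the repo:

use tree_sitter::Query;

fn main() {
    let language = tree_sitter_rust::language();
    // Illustrative embedding query: each function or struct is an @item
    // whose @name is its identifier.
    let source = r#"
        (function_item name: (identifier) @name) @item
        (struct_item name: (type_identifier) @name) @item
    "#;
    let query = Query::new(language, source).expect("query should compile");
    assert!(query.capture_names().iter().any(|n| n.as_str() == "item"));
    assert!(query.capture_names().iter().any(|n| n.as_str() == "name"));
}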

pub fn with_brackets_query(mut self, source: &str) -> Result<Self> {
    let grammar = self.grammar_mut();
    let query = Query::new(grammar.ts_language, source)?;

@@ -1648,6 +1693,7 @@ impl Default for FakeLspAdapter {
        capabilities: lsp::LanguageServer::full_capabilities(),
        initializer: None,
        disk_based_diagnostics_progress_token: None,
        initialization_options: None,
        disk_based_diagnostics_sources: Vec::new(),
    }
}

@@ -1697,6 +1743,10 @@ impl LspAdapter for Arc<FakeLspAdapter> {
    async fn disk_based_diagnostics_progress_token(&self) -> Option<String> {
        self.disk_based_diagnostics_progress_token.clone()
    }

    async fn initialization_options(&self) -> Option<Value> {
        self.initialization_options.clone()
    }
}

fn get_capture_indices(query: &Query, captures: &mut [(&str, &mut Option<u32>)]) {

@@ -1752,7 +1802,7 @@ mod tests {
        first_line_pattern: Some(Regex::new(r"\bnode\b").unwrap()),
        ..Default::default()
    },
    tree_sitter_javascript::language(),
    tree_sitter_typescript::language_tsx(),
    vec![],
    |_| Default::default(),
    );

@@ -569,11 +569,19 @@ impl SyntaxSnapshot {
    range.end = range.end.saturating_sub(step_start_byte);
}

included_ranges = splice_included_ranges(
let changed_indices;
(included_ranges, changed_indices) = splice_included_ranges(
    old_tree.included_ranges(),
    &parent_layer_changed_ranges,
    &included_ranges,
);
insert_newlines_between_ranges(
    changed_indices,
    &mut included_ranges,
    &text,
    step_start_byte,
    step_start_point,
);
}

if included_ranges.is_empty() {

@@ -586,7 +594,7 @@ impl SyntaxSnapshot {
}

log::trace!(
    "update layer. language:{}, start:{:?}, ranges:{:?}",
    "update layer. language:{}, start:{:?}, included_ranges:{:?}",
    language.name(),
    LogAnchorRange(&step.range, text),
    LogIncludedRanges(&included_ranges),

@@ -608,6 +616,16 @@ impl SyntaxSnapshot {
    }),
);
} else {
    if matches!(step.mode, ParseMode::Combined { .. }) {
        insert_newlines_between_ranges(
            0..included_ranges.len(),
            &mut included_ranges,
            text,
            step_start_byte,
            step_start_point,
        );
    }

    if included_ranges.is_empty() {
        included_ranges.push(tree_sitter::Range {
            start_byte: 0,

@@ -771,8 +789,10 @@ impl SyntaxSnapshot {
    range: Range<T>,
    buffer: &'a BufferSnapshot,
) -> impl 'a + Iterator<Item = SyntaxLayerInfo> {
    let start = buffer.anchor_before(range.start.to_offset(buffer));
    let end = buffer.anchor_after(range.end.to_offset(buffer));
    let start_offset = range.start.to_offset(buffer);
    let end_offset = range.end.to_offset(buffer);
    let start = buffer.anchor_before(start_offset);
    let end = buffer.anchor_after(end_offset);

    let mut cursor = self.layers.filter::<_, ()>(move |summary| {
        if summary.max_depth > summary.min_depth {

@@ -787,20 +807,21 @@ impl SyntaxSnapshot {
    cursor.next(buffer);
    iter::from_fn(move || {
        while let Some(layer) = cursor.item() {
            let mut info = None;
            if let SyntaxLayerContent::Parsed { tree, language } = &layer.content {
                let info = SyntaxLayerInfo {
                let layer_start_offset = layer.range.start.to_offset(buffer);
                let layer_start_point = layer.range.start.to_point(buffer).to_ts_point();

                info = Some(SyntaxLayerInfo {
                    tree,
                    language,
                    depth: layer.depth,
                    offset: (
                        layer.range.start.to_offset(buffer),
                        layer.range.start.to_point(buffer).to_ts_point(),
                    ),
                };
                cursor.next(buffer);
                return Some(info);
            } else {
                cursor.next(buffer);
                    offset: (layer_start_offset, layer_start_point),
                });
            }
            cursor.next(buffer);
            if info.is_some() {
                return info;
            }
        }
        None

@@ -1272,14 +1293,20 @@ fn get_injections(
    }
}

/// Update the given list of included `ranges`, removing any ranges that intersect
/// `removed_ranges`, and inserting the given `new_ranges`.
///
/// Returns a new vector of ranges, and the range of the vector that was changed,
/// from the previous `ranges` vector.
pub(crate) fn splice_included_ranges(
    mut ranges: Vec<tree_sitter::Range>,
    removed_ranges: &[Range<usize>],
    new_ranges: &[tree_sitter::Range],
) -> Vec<tree_sitter::Range> {
) -> (Vec<tree_sitter::Range>, Range<usize>) {
    let mut removed_ranges = removed_ranges.iter().cloned().peekable();
    let mut new_ranges = new_ranges.into_iter().cloned().peekable();
    let mut ranges_ix = 0;
    let mut changed_portion = usize::MAX..0;
    loop {
        let next_new_range = new_ranges.peek();
        let next_removed_range = removed_ranges.peek();

@@ -1341,11 +1368,69 @@ pub(crate) fn splice_included_ranges(
        }
    }

    changed_portion.start = changed_portion.start.min(start_ix);
    changed_portion.end = changed_portion.end.max(if insert.is_some() {
        start_ix + 1
    } else {
        start_ix
    });

    ranges.splice(start_ix..end_ix, insert);
    ranges_ix = start_ix;
}

ranges
if changed_portion.end < changed_portion.start {
    changed_portion = 0..0;
}

(ranges, changed_portion)
}
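
Note: grounded in the first case of the test further below: splicing [20..30, 50..60, 80..90] with removed ranges [54..56, 58..68] and new ranges [50..54, 59..67] rewrites the middle of the vector and reports 1..3 as the changed index range. A reduced sketch of the same splice over plain byte ranges; the real function works on tree_sitter::Range, coalesces overlaps, and reports the changed indices:

use std::ops::Range;

// Reduced model: drop every range that intersects a removed range, then
// merge the new ranges in, keeping the result sorted.
fn splice(
    ranges: &[Range<usize>],
    removed: &[Range<usize>],
    new: &[Range<usize>],
) -> Vec<Range<usize>> {
    let mut result: Vec<Range<usize>> = ranges
        .iter()
        .filter(|r| !removed.iter().any(|d| d.start < r.end && r.start < d.end))
        .cloned()
        .collect();
    result.extend(new.iter().cloned());
    result.sort_by_key(|r| r.start);
    result
}

fn main() {
    // Mirrors the first case in test_splice_included_ranges below.
    let result = splice(&[20..30, 50..60, 80..90], &[54..56, 58..68], &[50..54, 59..67]);
    assert_eq!(result, vec![20..30, 50..54, 59..67, 80..90]);
}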

/// Ensure there are newline ranges in between content ranges that appear on
/// different lines. For performance, only iterate through the given range of
/// indices. All of the ranges in the array are relative to a given start byte
/// and point.
fn insert_newlines_between_ranges(
    indices: Range<usize>,
    ranges: &mut Vec<tree_sitter::Range>,
    text: &text::BufferSnapshot,
    start_byte: usize,
    start_point: Point,
) {
    let mut ix = indices.end + 1;
    while ix > indices.start {
        ix -= 1;
        if 0 == ix || ix == ranges.len() {
            continue;
        }

        let range_b = ranges[ix].clone();
        let range_a = &mut ranges[ix - 1];
        if range_a.end_point.column == 0 {
            continue;
        }

        if range_a.end_point.row < range_b.start_point.row {
            let end_point = start_point + Point::from_ts_point(range_a.end_point);
            let line_end = Point::new(end_point.row, text.line_len(end_point.row));
            if end_point.column as u32 >= line_end.column {
                range_a.end_byte += 1;
                range_a.end_point.row += 1;
                range_a.end_point.column = 0;
            } else {
                let newline_offset = text.point_to_offset(line_end);
                ranges.insert(
                    ix,
                    tree_sitter::Range {
                        start_byte: newline_offset - start_byte,
                        end_byte: newline_offset - start_byte + 1,
                        start_point: (line_end - start_point).to_ts_point(),
                        end_point: ((line_end - start_point) + Point::new(1, 0)).to_ts_point(),
                    },
                )
            }
        }
    }
}
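
Note: the effect of this helper shows up in the combined-injection tests below: two Ruby directives on different lines get a one-byte newline range between them, so a "#" comment in the first cannot swallow the second. A reduced check of the condition the loop above applies:

// Reduced model of when a separator range is needed: the previous range
// ends mid-line (column != 0) on an earlier row than the next range starts.
struct Pos {
    row: usize,
    column: usize,
}

fn needs_separator(a_end: Pos, b_start: Pos) -> bool {
    a_end.column != 0 && a_end.row < b_start.row
}

fn main() {
    // Two ERB directives on different rows: a newline range is inserted.
    assert!(needs_separator(Pos { row: 2, column: 30 }, Pos { row: 4, column: 4 }));
    // A range already ending at column 0 needs nothing.
    assert!(!needs_separator(Pos { row: 3, column: 0 }, Pos { row: 4, column: 4 }));
}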

impl OwnedSyntaxLayerInfo {

@@ -11,7 +11,7 @@ use util::test::marked_text_ranges;
fn test_splice_included_ranges() {
    let ranges = vec![ts_range(20..30), ts_range(50..60), ts_range(80..90)];

    let new_ranges = splice_included_ranges(
    let (new_ranges, change) = splice_included_ranges(
        ranges.clone(),
        &[54..56, 58..68],
        &[ts_range(50..54), ts_range(59..67)],

@@ -25,14 +25,16 @@ fn test_splice_included_ranges() {
            ts_range(80..90),
        ]
    );
    assert_eq!(change, 1..3);

    let new_ranges = splice_included_ranges(ranges.clone(), &[70..71, 91..100], &[]);
    let (new_ranges, change) = splice_included_ranges(ranges.clone(), &[70..71, 91..100], &[]);
    assert_eq!(
        new_ranges,
        &[ts_range(20..30), ts_range(50..60), ts_range(80..90)]
    );
    assert_eq!(change, 2..3);

    let new_ranges =
    let (new_ranges, change) =
        splice_included_ranges(ranges.clone(), &[], &[ts_range(0..2), ts_range(70..75)]);
    assert_eq!(
        new_ranges,

@@ -44,16 +46,21 @@ fn test_splice_included_ranges() {
            ts_range(80..90)
        ]
    );
    assert_eq!(change, 0..4);

    let new_ranges = splice_included_ranges(ranges.clone(), &[30..50], &[ts_range(25..55)]);
    let (new_ranges, change) =
        splice_included_ranges(ranges.clone(), &[30..50], &[ts_range(25..55)]);
    assert_eq!(new_ranges, &[ts_range(25..55), ts_range(80..90)]);
    assert_eq!(change, 0..1);

    // does not create overlapping ranges
    let new_ranges = splice_included_ranges(ranges.clone(), &[0..18], &[ts_range(20..32)]);
    let (new_ranges, change) =
        splice_included_ranges(ranges.clone(), &[0..18], &[ts_range(20..32)]);
    assert_eq!(
        new_ranges,
        &[ts_range(20..32), ts_range(50..60), ts_range(80..90)]
    );
    assert_eq!(change, 0..1);

fn ts_range(range: Range<usize>) -> tree_sitter::Range {
    tree_sitter::Range {

@@ -511,7 +518,7 @@ fn test_removing_injection_by_replacing_across_boundary() {
}

#[gpui::test]
fn test_combined_injections() {
fn test_combined_injections_simple() {
    let (buffer, syntax_map) = test_edit_sequence(
        "ERB",
        &[

@@ -653,33 +660,78 @@ fn test_combined_injections_editing_after_last_injection() {

#[gpui::test]
fn test_combined_injections_inside_injections() {
    let (_buffer, _syntax_map) = test_edit_sequence(
    let (buffer, syntax_map) = test_edit_sequence(
        "Markdown",
        &[
            r#"
                here is some ERB code:
                here is
                some
                ERB code:

                ```erb
                <ul>
                <% people.each do |person| %>
                    <li><%= person.name %></li>
                    <li><%= person.age %></li>
                <% end %>
                </ul>
                ```
            "#,
            r#"
                here is some ERB code:
                here is
                some
                ERB code:

                ```erb
                <ul>
                <% people«2».each do |person| %>
                    <li><%= person.name %></li>
                    <li><%= person.age %></li>
                <% end %>
                </ul>
                ```
            "#,
            // Inserting a comment character inside one code directive
            // does not cause the other code directive to become a comment,
            // because newlines are included in between each injection range.
            r#"
                here is
                some
                ERB code:

                ```erb
                <ul>
                <% people2.each do |person| %>
                    <li><%= «# »person.name %></li>
                    <li><%= person.age %></li>
                <% end %>
                </ul>
                ```
            "#,
        ],
    );

    // Check that the code directive below the ruby comment is
    // not parsed as a comment.
    assert_capture_ranges(
        &syntax_map,
        &buffer,
        &["method"],
        "
            here is
            some
            ERB code:

            ```erb
            <ul>
            <% people2.«each» do |person| %>
                <li><%= # person.name %></li>
                <li><%= person.«age» %></li>
            <% end %>
            </ul>
            ```
        ",
    );
}

#[gpui::test]

@@ -711,11 +763,7 @@ fn test_empty_combined_injections_inside_injections() {
}

#[gpui::test(iterations = 50)]
fn test_random_syntax_map_edits(mut rng: StdRng) {
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(10);

fn test_random_syntax_map_edits_rust_macros(rng: StdRng) {
    let text = r#"
        fn test_something() {
            let vec = vec![5, 1, 3, 8];

@@ -736,68 +784,12 @@ fn test_random_syntax_map_edits(mut rng: StdRng) {
    let registry = Arc::new(LanguageRegistry::test());
    let language = Arc::new(rust_lang());
    registry.add(language.clone());
    let mut buffer = Buffer::new(0, 0, text);

    let mut syntax_map = SyntaxMap::new();
    syntax_map.set_language_registry(registry.clone());
    syntax_map.reparse(language.clone(), &buffer);

    let mut reference_syntax_map = SyntaxMap::new();
    reference_syntax_map.set_language_registry(registry.clone());

    log::info!("initial text:\n{}", buffer.text());

    for _ in 0..operations {
        let prev_buffer = buffer.snapshot();
        let prev_syntax_map = syntax_map.snapshot();

        buffer.randomly_edit(&mut rng, 3);
        log::info!("text:\n{}", buffer.text());

        syntax_map.interpolate(&buffer);
        check_interpolation(&prev_syntax_map, &syntax_map, &prev_buffer, &buffer);

        syntax_map.reparse(language.clone(), &buffer);

        reference_syntax_map.clear();
        reference_syntax_map.reparse(language.clone(), &buffer);
    }

    for i in 0..operations {
        let i = operations - i - 1;
        buffer.undo();
        log::info!("undoing operation {}", i);
        log::info!("text:\n{}", buffer.text());

        syntax_map.interpolate(&buffer);
        syntax_map.reparse(language.clone(), &buffer);

        reference_syntax_map.clear();
        reference_syntax_map.reparse(language.clone(), &buffer);
        assert_eq!(
            syntax_map.layers(&buffer).len(),
            reference_syntax_map.layers(&buffer).len(),
            "wrong number of layers after undoing edit {i}"
        );
    }

    let layers = syntax_map.layers(&buffer);
    let reference_layers = reference_syntax_map.layers(&buffer);
    for (edited_layer, reference_layer) in layers.into_iter().zip(reference_layers.into_iter()) {
        assert_eq!(
            edited_layer.node().to_sexp(),
            reference_layer.node().to_sexp()
        );
        assert_eq!(edited_layer.node().range(), reference_layer.node().range());
    }
    test_random_edits(text, registry, language, rng);
}

#[gpui::test(iterations = 50)]
fn test_random_syntax_map_edits_with_combined_injections(mut rng: StdRng) {
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(10);

fn test_random_syntax_map_edits_with_erb(rng: StdRng) {
    let text = r#"
        <div id="main">
        <% if one?(:two) %>

@@ -814,13 +806,60 @@ fn test_random_syntax_map_edits_with_combined_injections(mut rng: StdRng) {
        </div>
    "#
    .unindent()
    .repeat(8);
    .repeat(5);

    let registry = Arc::new(LanguageRegistry::test());
    let language = Arc::new(erb_lang());
    registry.add(language.clone());
    registry.add(Arc::new(ruby_lang()));
    registry.add(Arc::new(html_lang()));

    test_random_edits(text, registry, language, rng);
}

#[gpui::test(iterations = 50)]
fn test_random_syntax_map_edits_with_heex(rng: StdRng) {
    let text = r#"
        defmodule TheModule do
            def the_method(assigns) do
                ~H"""
                <%= if @empty do %>
                    <div class="h-4"></div>
                <% else %>
                    <div class="max-w-2xl w-full animate-pulse">
                        <div class="flex-1 space-y-4">
                            <div class={[@bg_class, "h-4 rounded-lg w-3/4"]}></div>
                            <div class={[@bg_class, "h-4 rounded-lg"]}></div>
                            <div class={[@bg_class, "h-4 rounded-lg w-5/6"]}></div>
                        </div>
                    </div>
                <% end %>
                """
            end
        end
    "#
    .unindent()
    .repeat(3);

    let registry = Arc::new(LanguageRegistry::test());
    let language = Arc::new(elixir_lang());
    registry.add(language.clone());
    registry.add(Arc::new(heex_lang()));
    registry.add(Arc::new(html_lang()));

    test_random_edits(text, registry, language, rng);
}

fn test_random_edits(
    text: String,
    registry: Arc<LanguageRegistry>,
    language: Arc<Language>,
    mut rng: StdRng,
) {
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(10);

    let mut buffer = Buffer::new(0, 0, text);

    let mut syntax_map = SyntaxMap::new();

@@ -984,11 +1023,14 @@ fn check_interpolation(

fn test_edit_sequence(language_name: &str, steps: &[&str]) -> (Buffer, SyntaxMap) {
    let registry = Arc::new(LanguageRegistry::test());
    registry.add(Arc::new(elixir_lang()));
    registry.add(Arc::new(heex_lang()));
    registry.add(Arc::new(rust_lang()));
    registry.add(Arc::new(ruby_lang()));
    registry.add(Arc::new(html_lang()));
    registry.add(Arc::new(erb_lang()));
    registry.add(Arc::new(markdown_lang()));

    let language = registry
        .language_for_name(language_name)
        .now_or_never()

@@ -1074,6 +1116,7 @@ fn ruby_lang() -> Language {
        r#"
        ["if" "do" "else" "end"] @keyword
        (instance_variable) @ivar
        (call method: (identifier) @method)
        "#,
    )
    .unwrap()

@@ -1158,6 +1201,52 @@ fn markdown_lang() -> Language {
    .unwrap()
}

fn elixir_lang() -> Language {
    Language::new(
        LanguageConfig {
            name: "Elixir".into(),
            path_suffixes: vec!["ex".into()],
            ..Default::default()
        },
        Some(tree_sitter_elixir::language()),
    )
    .with_highlights_query(
        r#"
        "#,
    )
    .unwrap()
}

fn heex_lang() -> Language {
    Language::new(
        LanguageConfig {
            name: "HEEx".into(),
            path_suffixes: vec!["heex".into()],
            ..Default::default()
        },
        Some(tree_sitter_heex::language()),
    )
    .with_injection_query(
        r#"
        (
            (directive
                [
                    (partial_expression_value)
                    (expression_value)
                    (ending_expression_value)
                ] @content)
            (#set! language "elixir")
            (#set! combined)
        )

        ((expression (expression_value) @content)
            (#set! language "elixir"))
        "#,
    )
    .unwrap()
}

fn range_for_text(buffer: &Buffer, text: &str) -> Range<usize> {
    let start = buffer.as_rope().to_string().find(text).unwrap();
    start..start + text.len()

@@ -93,7 +93,7 @@ impl PickerDelegate for LanguageSelectorDelegate {
    self.matches.len()
}

fn confirm(&mut self, cx: &mut ViewContext<Picker<Self>>) {
fn confirm(&mut self, _: bool, cx: &mut ViewContext<Picker<Self>>) {
    if let Some(mat) = self.matches.get(self.selected_index) {
        let language_name = &self.candidates[mat.candidate_id].string;
        let language = self.language_registry.language_for_name(language_name);

@@ -467,8 +467,13 @@ impl Item for LspLogView {
impl SearchableItem for LspLogView {
    type Match = <Editor as SearchableItem>::Match;

    fn to_search_event(event: &Self::Event) -> Option<workspace::searchable::SearchEvent> {
        Editor::to_search_event(event)
    fn to_search_event(
        &mut self,
        event: &Self::Event,
        cx: &mut ViewContext<Self>,
    ) -> Option<workspace::searchable::SearchEvent> {
        self.editor
            .update(cx, |editor, cx| editor.to_search_event(event, cx))
    }

    fn clear_matches(&mut self, cx: &mut ViewContext<Self>) {

@@ -494,6 +499,11 @@ impl SearchableItem for LspLogView {
        .update(cx, |e, cx| e.activate_match(index, matches, cx))
    }

    fn select_matches(&mut self, matches: Vec<Self::Match>, cx: &mut ViewContext<Self>) {
        self.editor
            .update(cx, |e, cx| e.select_matches(matches, cx))
    }

    fn find_matches(
        &mut self,
        query: project::search::SearchQuery,

@@ -3,6 +3,7 @@ gpui::actions!(
    [
        Cancel,
        Confirm,
        SecondaryConfirm,
        SelectPrev,
        SelectNext,
        SelectFirst,

@@ -177,7 +177,7 @@ impl PickerDelegate for OutlineViewDelegate {
    Task::ready(())
}

fn confirm(&mut self, cx: &mut ViewContext<OutlineView>) {
fn confirm(&mut self, _: bool, cx: &mut ViewContext<OutlineView>) {
    self.prev_scroll_position.take();
    self.active_editor.update(cx, |active_editor, cx| {
        if let Some(rows) = active_editor.highlighted_rows() {

@ -7,7 +7,7 @@ use gpui::{
|
|||
AnyElement, AnyViewHandle, AppContext, Axis, Entity, MouseState, Task, View, ViewContext,
|
||||
ViewHandle,
|
||||
};
|
||||
use menu::{Cancel, Confirm, SelectFirst, SelectLast, SelectNext, SelectPrev};
|
||||
use menu::{Cancel, Confirm, SecondaryConfirm, SelectFirst, SelectLast, SelectNext, SelectPrev};
|
||||
use parking_lot::Mutex;
|
||||
use std::{cmp, sync::Arc};
|
||||
use util::ResultExt;
|
||||
|
@ -34,7 +34,7 @@ pub trait PickerDelegate: Sized + 'static {
|
|||
fn selected_index(&self) -> usize;
|
||||
fn set_selected_index(&mut self, ix: usize, cx: &mut ViewContext<Picker<Self>>);
|
||||
fn update_matches(&mut self, query: String, cx: &mut ViewContext<Picker<Self>>) -> Task<()>;
|
||||
fn confirm(&mut self, cx: &mut ViewContext<Picker<Self>>);
|
||||
fn confirm(&mut self, secondary: bool, cx: &mut ViewContext<Picker<Self>>);
|
||||
fn dismissed(&mut self, cx: &mut ViewContext<Picker<Self>>);
|
||||
fn render_match(
|
||||
&self,
|
||||
|
@ -118,8 +118,8 @@ impl<D: PickerDelegate> View for Picker<D> {
|
|||
// Capture mouse events
|
||||
.on_down(MouseButton::Left, |_, _, _| {})
|
||||
.on_up(MouseButton::Left, |_, _, _| {})
|
||||
.on_click(MouseButton::Left, move |_, picker, cx| {
|
||||
picker.select_index(ix, cx);
|
||||
.on_click(MouseButton::Left, move |click, picker, cx| {
|
||||
picker.select_index(ix, click.cmd, cx);
|
||||
})
|
||||
.with_cursor_style(CursorStyle::PointingHand)
|
||||
.into_any()
|
||||
|
@ -175,6 +175,7 @@ impl<D: PickerDelegate> Picker<D> {
|
|||
cx.add_action(Self::select_next);
|
||||
cx.add_action(Self::select_prev);
|
||||
cx.add_action(Self::confirm);
|
||||
cx.add_action(Self::secondary_confirm);
|
||||
cx.add_action(Self::cancel);
|
||||
}
|
||||
|
||||
|
@ -288,11 +289,11 @@ impl<D: PickerDelegate> Picker<D> {
|
|||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn select_index(&mut self, index: usize, cx: &mut ViewContext<Self>) {
|
||||
pub fn select_index(&mut self, index: usize, cmd: bool, cx: &mut ViewContext<Self>) {
|
||||
if self.delegate.match_count() > 0 {
|
||||
self.confirmed = true;
|
||||
self.delegate.set_selected_index(index, cx);
|
||||
self.delegate.confirm(cx);
|
||||
self.delegate.confirm(cmd, cx);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -330,7 +331,12 @@ impl<D: PickerDelegate> Picker<D> {
|
|||
|
||||
pub fn confirm(&mut self, _: &Confirm, cx: &mut ViewContext<Self>) {
|
||||
self.confirmed = true;
|
||||
self.delegate.confirm(cx);
|
||||
self.delegate.confirm(false, cx);
|
||||
}
|
||||
|
||||
pub fn secondary_confirm(&mut self, _: &SecondaryConfirm, cx: &mut ViewContext<Self>) {
|
||||
self.confirmed = true;
|
||||
self.delegate.confirm(true, cx);
|
||||
}
|
||||
|
||||
fn cancel(&mut self, _: &Cancel, cx: &mut ViewContext<Self>) {
|
||||
|
|
|
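The change above threads a `secondary` flag through every confirm path: the `Confirm` action passes `false`, `SecondaryConfirm` passes `true`, and mouse clicks forward `click.cmd`, so delegates can branch on it. A minimal sketch of the pattern, with a hypothetical `Delegate` trait standing in for the real `PickerDelegate`:

// Sketch only: `Delegate` stands in for PickerDelegate; the real trait
// also carries a ViewContext parameter.
trait Delegate {
    fn confirm(&mut self, secondary: bool);
}

struct SimplePicker<D: Delegate> {
    delegate: D,
    confirmed: bool,
}

impl<D: Delegate> SimplePicker<D> {
    // Primary action (Enter): open in place.
    fn confirm(&mut self) {
        self.confirmed = true;
        self.delegate.confirm(false);
    }

    // Secondary action (cmd-click / SecondaryConfirm): e.g. open in a split.
    fn secondary_confirm(&mut self) {
        self.confirmed = true;
        self.delegate.confirm(true);
    }
}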
@ -1358,16 +1358,6 @@ impl LspCommand for GetCompletions {
|
|||
completions
|
||||
.into_iter()
|
||||
.filter_map(move |mut lsp_completion| {
|
||||
// For now, we can only handle additional edits if they are returned
|
||||
// when resolving the completion, not if they are present initially.
|
||||
if lsp_completion
|
||||
.additional_text_edits
|
||||
.as_ref()
|
||||
.map_or(false, |edits| !edits.is_empty())
|
||||
{
|
||||
return None;
|
||||
}
|
||||
|
||||
let (old_range, mut new_text) = match lsp_completion.text_edit.as_ref() {
|
||||
// If the language server provides a range to overwrite, then
|
||||
// check that the range is valid.
|
||||
|
|
|
@ -50,7 +50,7 @@ use lsp::{
|
|||
};
|
||||
use lsp_command::*;
|
||||
use postage::watch;
|
||||
use project_settings::ProjectSettings;
|
||||
use project_settings::{LspSettings, ProjectSettings};
|
||||
use rand::prelude::*;
|
||||
use search::SearchQuery;
|
||||
use serde::Serialize;
|
||||
|
@ -149,6 +149,7 @@ pub struct Project {
|
|||
_maintain_workspace_config: Task<()>,
|
||||
terminals: Terminals,
|
||||
copilot_enabled: bool,
|
||||
current_lsp_settings: HashMap<Arc<str>, LspSettings>,
|
||||
}
|
||||
|
||||
struct DelayedDebounced {
|
||||
|
@ -260,6 +261,7 @@ pub enum Event {
|
|||
ActiveEntryChanged(Option<ProjectEntryId>),
|
||||
WorktreeAdded,
|
||||
WorktreeRemoved(WorktreeId),
|
||||
WorktreeUpdatedEntries(WorktreeId, UpdatedEntriesSet),
|
||||
DiskBasedDiagnosticsStarted {
|
||||
language_server_id: LanguageServerId,
|
||||
},
|
||||
|
@ -614,6 +616,7 @@ impl Project {
|
|||
local_handles: Vec::new(),
|
||||
},
|
||||
copilot_enabled: Copilot::global(cx).is_some(),
|
||||
current_lsp_settings: settings::get::<ProjectSettings>(cx).lsp.clone(),
|
||||
}
|
||||
})
|
||||
}
|
||||
|
@ -706,6 +709,7 @@ impl Project {
|
|||
local_handles: Vec::new(),
|
||||
},
|
||||
copilot_enabled: Copilot::global(cx).is_some(),
|
||||
current_lsp_settings: settings::get::<ProjectSettings>(cx).lsp.clone(),
|
||||
};
|
||||
for worktree in worktrees {
|
||||
let _ = this.add_worktree(&worktree, cx);
|
||||
|
@ -779,7 +783,9 @@ impl Project {
|
|||
let mut language_servers_to_stop = Vec::new();
|
||||
let mut language_servers_to_restart = Vec::new();
|
||||
let languages = self.languages.to_vec();
|
||||
let project_settings = settings::get::<ProjectSettings>(cx).clone();
|
||||
|
||||
let new_lsp_settings = settings::get::<ProjectSettings>(cx).lsp.clone();
|
||||
let current_lsp_settings = &self.current_lsp_settings;
|
||||
for (worktree_id, started_lsp_name) in self.language_server_ids.keys() {
|
||||
let language = languages.iter().find_map(|l| {
|
||||
let adapter = l
|
||||
|
@ -796,16 +802,25 @@ impl Project {
|
|||
if !language_settings(Some(language), file.as_ref(), cx).enable_language_server {
|
||||
language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
|
||||
} else if let Some(worktree) = worktree {
|
||||
let new_lsp_settings = project_settings
|
||||
.lsp
|
||||
.get(&adapter.name.0)
|
||||
.and_then(|s| s.initialization_options.as_ref());
|
||||
if adapter.initialization_options.as_ref() != new_lsp_settings {
|
||||
language_servers_to_restart.push((worktree, Arc::clone(language)));
|
||||
let server_name = &adapter.name.0;
|
||||
match (
|
||||
current_lsp_settings.get(server_name),
|
||||
new_lsp_settings.get(server_name),
|
||||
) {
|
||||
(None, None) => {}
|
||||
(Some(_), None) | (None, Some(_)) => {
|
||||
language_servers_to_restart.push((worktree, Arc::clone(language)));
|
||||
}
|
||||
(Some(current_lsp_settings), Some(new_lsp_settings)) => {
|
||||
if current_lsp_settings != new_lsp_settings {
|
||||
language_servers_to_restart.push((worktree, Arc::clone(language)));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
self.current_lsp_settings = new_lsp_settings;
|
||||
|
||||
// Stop all newly-disabled language servers.
|
||||
for (worktree_id, adapter_name) in language_servers_to_stop {
|
||||
|
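The match above restarts a language server whenever its entry in the per-server LSP settings map appears, disappears, or changes between the old and new settings. The decision reduces to a map diff; a standalone sketch, assuming plain HashMaps keyed by server name:

use std::collections::{HashMap, HashSet};

// Sketch: names whose settings were added, removed, or modified,
// i.e. the servers that would be restarted.
fn servers_to_restart<'a, V: PartialEq>(
    current: &'a HashMap<String, V>,
    new: &'a HashMap<String, V>,
) -> HashSet<&'a str> {
    current
        .keys()
        .chain(new.keys())
        .map(String::as_str)
        .filter(|name| current.get(*name) != new.get(*name))
        .collect()
}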
@ -3030,6 +3045,8 @@ impl Project {
|
|||
) -> Task<(Option<PathBuf>, Vec<WorktreeId>)> {
|
||||
let key = (worktree_id, adapter_name);
|
||||
if let Some(server_id) = self.language_server_ids.remove(&key) {
|
||||
log::info!("stopping language server {}", key.1 .0);
|
||||
|
||||
// Remove other entries for this language server as well
|
||||
let mut orphaned_worktrees = vec![worktree_id];
|
||||
let other_keys = self.language_server_ids.keys().cloned().collect::<Vec<_>>();
|
||||
|
@ -4432,11 +4449,11 @@ impl Project {
|
|||
};
|
||||
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let resolved_completion = lang_server
|
||||
let additional_text_edits = lang_server
|
||||
.request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
|
||||
.await?;
|
||||
|
||||
if let Some(edits) = resolved_completion.additional_text_edits {
|
||||
.await?
|
||||
.additional_text_edits;
|
||||
if let Some(edits) = additional_text_edits {
|
||||
let edits = this
|
||||
.update(&mut cx, |this, cx| {
|
||||
this.edits_from_lsp(
|
||||
|
@ -5389,6 +5406,10 @@ impl Project {
|
|||
this.update_local_worktree_buffers(&worktree, changes, cx);
|
||||
this.update_local_worktree_language_servers(&worktree, changes, cx);
|
||||
this.update_local_worktree_settings(&worktree, changes, cx);
|
||||
cx.emit(Event::WorktreeUpdatedEntries(
|
||||
worktree.read(cx).id(),
|
||||
changes.clone(),
|
||||
));
|
||||
}
|
||||
worktree::Event::UpdatedGitRepositories(updated_repos) => {
|
||||
this.update_local_worktree_buffers_git_repos(worktree, updated_repos, cx)
|
||||
|
|
|
@ -397,6 +397,7 @@ impl Worktree {
|
|||
}))
|
||||
}
|
||||
|
||||
pub fn remote(
|
||||
project_remote_id: u64,
|
||||
replica_id: ReplicaId,
|
||||
|
@ -2022,6 +2023,9 @@ impl LocalSnapshot {
|
|||
) -> Vec<Arc<Path>> {
|
||||
let mut changes = vec![];
|
||||
let mut edits = vec![];
|
||||
|
||||
let statuses = repo_ptr.statuses();
|
||||
|
||||
for mut entry in self
|
||||
.descendent_entries(false, false, &work_directory.0)
|
||||
.cloned()
|
||||
|
@ -2029,10 +2033,8 @@ impl LocalSnapshot {
|
|||
let Ok(repo_path) = entry.path.strip_prefix(&work_directory.0) else {
|
||||
continue;
|
||||
};
|
||||
let git_file_status = repo_ptr
|
||||
.status(&RepoPath(repo_path.into()))
|
||||
.log_err()
|
||||
.flatten();
|
||||
let repo_path = RepoPath(repo_path.to_path_buf());
|
||||
let git_file_status = statuses.as_ref().and_then(|s| s.get(&repo_path).copied());
|
||||
if entry.git_status != git_file_status {
|
||||
entry.git_status = git_file_status;
|
||||
changes.push(entry.path.clone());
|
||||
|
|
|
@ -159,6 +159,9 @@ pub enum Event {
|
|||
entry_id: ProjectEntryId,
|
||||
focus_opened_item: bool,
|
||||
},
|
||||
SplitEntry {
|
||||
entry_id: ProjectEntryId,
|
||||
},
|
||||
DockPositionChanged,
|
||||
Focus,
|
||||
}
|
||||
|
@ -290,6 +293,21 @@ impl ProjectPanel {
|
|||
}
|
||||
}
|
||||
}
|
||||
&Event::SplitEntry { entry_id } => {
|
||||
if let Some(worktree) = project.read(cx).worktree_for_entry(entry_id, cx) {
|
||||
if let Some(entry) = worktree.read(cx).entry_for_id(entry_id) {
|
||||
workspace
|
||||
.split_path(
|
||||
ProjectPath {
|
||||
worktree_id: worktree.read(cx).id(),
|
||||
path: entry.path.clone(),
|
||||
},
|
||||
cx,
|
||||
)
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
})
|
||||
|
@ -620,6 +638,10 @@ impl ProjectPanel {
|
|||
});
|
||||
}
|
||||
|
||||
fn split_entry(&mut self, entry_id: ProjectEntryId, cx: &mut ViewContext<Self>) {
|
||||
cx.emit(Event::SplitEntry { entry_id });
|
||||
}
|
||||
|
||||
fn new_file(&mut self, _: &NewFile, cx: &mut ViewContext<Self>) {
|
||||
self.add_entry(false, cx)
|
||||
}
|
||||
|
@ -1333,7 +1355,11 @@ impl ProjectPanel {
|
|||
if kind.is_dir() {
|
||||
this.toggle_expanded(entry_id, cx);
|
||||
} else {
|
||||
this.open_entry(entry_id, event.click_count > 1, cx);
|
||||
if event.cmd {
|
||||
this.split_entry(entry_id, cx);
|
||||
} else {
|
||||
this.open_entry(entry_id, event.click_count > 1, cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
|
|
@ -104,7 +104,7 @@ impl PickerDelegate for ProjectSymbolsDelegate {
|
|||
"Search project symbols...".into()
|
||||
}
|
||||
|
||||
fn confirm(&mut self, cx: &mut ViewContext<ProjectSymbols>) {
|
||||
fn confirm(&mut self, secondary: bool, cx: &mut ViewContext<ProjectSymbols>) {
|
||||
if let Some(symbol) = self
|
||||
.matches
|
||||
.get(self.selected_match_index)
|
||||
|
@ -122,7 +122,12 @@ impl PickerDelegate for ProjectSymbolsDelegate {
|
|||
.read(cx)
|
||||
.clip_point_utf16(symbol.range.start, Bias::Left);
|
||||
|
||||
let editor = workspace.open_project_item::<Editor>(buffer, cx);
|
||||
let editor = if secondary {
|
||||
workspace.split_project_item::<Editor>(buffer, cx)
|
||||
} else {
|
||||
workspace.open_project_item::<Editor>(buffer, cx)
|
||||
};
|
||||
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.change_selections(Some(Autoscroll::center()), cx, |s| {
|
||||
s.select_ranges([position..position])
|
||||
|
|
|
@ -134,7 +134,7 @@ impl PickerDelegate for RecentProjectsDelegate {
|
|||
let combined_string = location
|
||||
.paths()
|
||||
.iter()
|
||||
.map(|path| path.to_string_lossy().to_owned())
|
||||
.map(|path| util::paths::compact(&path).to_string_lossy().into_owned())
|
||||
.collect::<Vec<_>>()
|
||||
.join("");
|
||||
StringMatchCandidate::new(id, combined_string)
|
||||
|
@ -161,7 +161,7 @@ impl PickerDelegate for RecentProjectsDelegate {
|
|||
Task::ready(())
|
||||
}
|
||||
|
||||
fn confirm(&mut self, cx: &mut ViewContext<RecentProjects>) {
|
||||
fn confirm(&mut self, _: bool, cx: &mut ViewContext<RecentProjects>) {
|
||||
if let Some((selected_match, workspace)) = self
|
||||
.matches
|
||||
.get(self.selected_index())
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use crate::{
|
||||
SearchOption, SelectNextMatch, SelectPrevMatch, ToggleCaseSensitive, ToggleRegex,
|
||||
ToggleWholeWord,
|
||||
SearchOption, SelectAllMatches, SelectNextMatch, SelectPrevMatch, ToggleCaseSensitive,
|
||||
ToggleRegex, ToggleWholeWord,
|
||||
};
|
||||
use collections::HashMap;
|
||||
use editor::Editor;
|
||||
|
@ -39,8 +39,10 @@ pub fn init(cx: &mut AppContext) {
|
|||
cx.add_action(BufferSearchBar::focus_editor);
|
||||
cx.add_action(BufferSearchBar::select_next_match);
|
||||
cx.add_action(BufferSearchBar::select_prev_match);
|
||||
cx.add_action(BufferSearchBar::select_all_matches);
|
||||
cx.add_action(BufferSearchBar::select_next_match_on_pane);
|
||||
cx.add_action(BufferSearchBar::select_prev_match_on_pane);
|
||||
cx.add_action(BufferSearchBar::select_all_matches_on_pane);
|
||||
cx.add_action(BufferSearchBar::handle_editor_cancel);
|
||||
add_toggle_option_action::<ToggleCaseSensitive>(SearchOption::CaseSensitive, cx);
|
||||
add_toggle_option_action::<ToggleWholeWord>(SearchOption::WholeWord, cx);
|
||||
|
@ -66,7 +68,7 @@ pub struct BufferSearchBar {
|
|||
active_searchable_item: Option<Box<dyn SearchableItemHandle>>,
|
||||
active_match_index: Option<usize>,
|
||||
active_searchable_item_subscription: Option<Subscription>,
|
||||
seachable_items_with_matches:
|
||||
searchable_items_with_matches:
|
||||
HashMap<Box<dyn WeakSearchableItemHandle>, Vec<Box<dyn Any + Send>>>,
|
||||
pending_search: Option<Task<()>>,
|
||||
case_sensitive: bool,
|
||||
|
@ -118,7 +120,7 @@ impl View for BufferSearchBar {
|
|||
.with_children(self.active_searchable_item.as_ref().and_then(
|
||||
|searchable_item| {
|
||||
let matches = self
|
||||
.seachable_items_with_matches
|
||||
.searchable_items_with_matches
|
||||
.get(&searchable_item.downgrade())?;
|
||||
let message = if let Some(match_ix) = self.active_match_index {
|
||||
format!("{}/{}", match_ix + 1, matches.len())
|
||||
|
@ -146,6 +148,7 @@ impl View for BufferSearchBar {
|
|||
Flex::row()
|
||||
.with_child(self.render_nav_button("<", Direction::Prev, cx))
|
||||
.with_child(self.render_nav_button(">", Direction::Next, cx))
|
||||
.with_child(self.render_action_button("Select All", cx))
|
||||
.aligned(),
|
||||
)
|
||||
.with_child(
|
||||
|
@ -249,7 +252,7 @@ impl BufferSearchBar {
|
|||
active_searchable_item: None,
|
||||
active_searchable_item_subscription: None,
|
||||
active_match_index: None,
|
||||
seachable_items_with_matches: Default::default(),
|
||||
searchable_items_with_matches: Default::default(),
|
||||
case_sensitive: false,
|
||||
whole_word: false,
|
||||
regex: false,
|
||||
|
@ -265,7 +268,7 @@ impl BufferSearchBar {
|
|||
|
||||
pub fn dismiss(&mut self, _: &Dismiss, cx: &mut ViewContext<Self>) {
|
||||
self.dismissed = true;
|
||||
for searchable_item in self.seachable_items_with_matches.keys() {
|
||||
for searchable_item in self.searchable_items_with_matches.keys() {
|
||||
if let Some(searchable_item) =
|
||||
WeakSearchableItemHandle::upgrade(searchable_item.as_ref(), cx)
|
||||
{
|
||||
|
@ -401,6 +404,37 @@ impl BufferSearchBar {
|
|||
.into_any()
|
||||
}
|
||||
|
||||
fn render_action_button(
|
||||
&self,
|
||||
icon: &'static str,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> AnyElement<Self> {
|
||||
let tooltip = "Select All Matches";
|
||||
let tooltip_style = theme::current(cx).tooltip.clone();
|
||||
let action_type_id = 0_usize;
|
||||
|
||||
enum ActionButton {}
|
||||
MouseEventHandler::<ActionButton, _>::new(action_type_id, cx, |state, cx| {
|
||||
let theme = theme::current(cx);
|
||||
let style = theme.search.action_button.style_for(state);
|
||||
Label::new(icon, style.text.clone())
|
||||
.contained()
|
||||
.with_style(style.container)
|
||||
})
|
||||
.on_click(MouseButton::Left, move |_, this, cx| {
|
||||
this.select_all_matches(&SelectAllMatches, cx)
|
||||
})
|
||||
.with_cursor_style(CursorStyle::PointingHand)
|
||||
.with_tooltip::<ActionButton>(
|
||||
action_type_id,
|
||||
tooltip.to_string(),
|
||||
Some(Box::new(SelectAllMatches)),
|
||||
tooltip_style,
|
||||
cx,
|
||||
)
|
||||
.into_any()
|
||||
}
|
||||
|
||||
fn render_close_button(
|
||||
&self,
|
||||
theme: &theme::Search,
|
||||
|
@ -488,11 +522,25 @@ impl BufferSearchBar {
|
|||
self.select_match(Direction::Prev, cx);
|
||||
}
|
||||
|
||||
fn select_all_matches(&mut self, _: &SelectAllMatches, cx: &mut ViewContext<Self>) {
|
||||
if !self.dismissed {
|
||||
if let Some(searchable_item) = self.active_searchable_item.as_ref() {
|
||||
if let Some(matches) = self
|
||||
.searchable_items_with_matches
|
||||
.get(&searchable_item.downgrade())
|
||||
{
|
||||
searchable_item.select_matches(matches, cx);
|
||||
self.focus_editor(&FocusEditor, cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn select_match(&mut self, direction: Direction, cx: &mut ViewContext<Self>) {
|
||||
if let Some(index) = self.active_match_index {
|
||||
if let Some(searchable_item) = self.active_searchable_item.as_ref() {
|
||||
if let Some(matches) = self
|
||||
.seachable_items_with_matches
|
||||
.searchable_items_with_matches
|
||||
.get(&searchable_item.downgrade())
|
||||
{
|
||||
let new_match_index =
|
||||
|
@ -524,6 +572,16 @@ impl BufferSearchBar {
|
|||
}
|
||||
}
|
||||
|
||||
fn select_all_matches_on_pane(
|
||||
pane: &mut Pane,
|
||||
action: &SelectAllMatches,
|
||||
cx: &mut ViewContext<Pane>,
|
||||
) {
|
||||
if let Some(search_bar) = pane.toolbar().read(cx).item_of_type::<BufferSearchBar>() {
|
||||
search_bar.update(cx, |bar, cx| bar.select_all_matches(action, cx));
|
||||
}
|
||||
}
|
||||
|
||||
fn on_query_editor_event(
|
||||
&mut self,
|
||||
_: ViewHandle<Editor>,
|
||||
|
@ -547,7 +605,7 @@ impl BufferSearchBar {
|
|||
|
||||
fn clear_matches(&mut self, cx: &mut ViewContext<Self>) {
|
||||
let mut active_item_matches = None;
|
||||
for (searchable_item, matches) in self.seachable_items_with_matches.drain() {
|
||||
for (searchable_item, matches) in self.searchable_items_with_matches.drain() {
|
||||
if let Some(searchable_item) =
|
||||
WeakSearchableItemHandle::upgrade(searchable_item.as_ref(), cx)
|
||||
{
|
||||
|
@ -559,7 +617,7 @@ impl BufferSearchBar {
|
|||
}
|
||||
}
|
||||
|
||||
self.seachable_items_with_matches
|
||||
self.searchable_items_with_matches
|
||||
.extend(active_item_matches);
|
||||
}
|
||||
|
||||
|
@ -605,13 +663,13 @@ impl BufferSearchBar {
|
|||
if let Some(active_searchable_item) =
|
||||
WeakSearchableItemHandle::upgrade(active_searchable_item.as_ref(), cx)
|
||||
{
|
||||
this.seachable_items_with_matches
|
||||
this.searchable_items_with_matches
|
||||
.insert(active_searchable_item.downgrade(), matches);
|
||||
|
||||
this.update_match_index(cx);
|
||||
if !this.dismissed {
|
||||
let matches = this
|
||||
.seachable_items_with_matches
|
||||
.searchable_items_with_matches
|
||||
.get(&active_searchable_item.downgrade())
|
||||
.unwrap();
|
||||
active_searchable_item.update_matches(matches, cx);
|
||||
|
@ -637,7 +695,7 @@ impl BufferSearchBar {
|
|||
.as_ref()
|
||||
.and_then(|searchable_item| {
|
||||
let matches = self
|
||||
.seachable_items_with_matches
|
||||
.searchable_items_with_matches
|
||||
.get(&searchable_item.downgrade())?;
|
||||
searchable_item.active_match_index(matches, cx)
|
||||
});
|
||||
|
@ -966,4 +1024,133 @@ mod tests {
|
|||
assert_eq!(search_bar.active_match_index, Some(2));
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_search_select_all_matches(cx: &mut TestAppContext) {
|
||||
crate::project_search::tests::init_test(cx);
|
||||
|
||||
let buffer_text = r#"
|
||||
A regular expression (shortened as regex or regexp;[1] also referred to as
|
||||
rational expression[2][3]) is a sequence of characters that specifies a search
|
||||
pattern in text. Usually such patterns are used by string-searching algorithms
|
||||
for "find" or "find and replace" operations on strings, or for input validation.
|
||||
"#
|
||||
.unindent();
|
||||
let expected_query_matches_count = buffer_text
|
||||
.chars()
|
||||
.filter(|c| c.to_ascii_lowercase() == 'a')
|
||||
.count();
|
||||
assert!(
|
||||
expected_query_matches_count > 1,
|
||||
"Should pick a query with multiple results"
|
||||
);
|
||||
let buffer = cx.add_model(|cx| Buffer::new(0, buffer_text, cx));
|
||||
let (window_id, _root_view) = cx.add_window(|_| EmptyView);
|
||||
|
||||
let editor = cx.add_view(window_id, |cx| Editor::for_buffer(buffer.clone(), None, cx));
|
||||
|
||||
let search_bar = cx.add_view(window_id, |cx| {
|
||||
let mut search_bar = BufferSearchBar::new(cx);
|
||||
search_bar.set_active_pane_item(Some(&editor), cx);
|
||||
search_bar.show(false, true, cx);
|
||||
search_bar
|
||||
});
|
||||
|
||||
search_bar.update(cx, |search_bar, cx| {
|
||||
search_bar.set_query("a", cx);
|
||||
});
|
||||
|
||||
editor.next_notification(cx).await;
|
||||
let initial_selections = editor.update(cx, |editor, cx| {
|
||||
let initial_selections = editor.selections.display_ranges(cx);
|
||||
assert_eq!(
|
||||
initial_selections.len(), 1,
|
||||
"Expected to have only one selection before adding carets to all matches, but got: {initial_selections:?}",
|
||||
);
|
||||
initial_selections
|
||||
});
|
||||
search_bar.update(cx, |search_bar, _| {
|
||||
assert_eq!(search_bar.active_match_index, Some(0));
|
||||
});
|
||||
|
||||
search_bar.update(cx, |search_bar, cx| {
|
||||
search_bar.select_all_matches(&SelectAllMatches, cx);
|
||||
let all_selections =
|
||||
editor.update(cx, |editor, cx| editor.selections.display_ranges(cx));
|
||||
assert_eq!(
|
||||
all_selections.len(),
|
||||
expected_query_matches_count,
|
||||
"Should select all `a` characters in the buffer, but got: {all_selections:?}"
|
||||
);
|
||||
});
|
||||
search_bar.update(cx, |search_bar, _| {
|
||||
assert_eq!(
|
||||
search_bar.active_match_index,
|
||||
Some(0),
|
||||
"Match index should not change after selecting all matches"
|
||||
);
|
||||
});
|
||||
|
||||
search_bar.update(cx, |search_bar, cx| {
|
||||
search_bar.select_next_match(&SelectNextMatch, cx);
|
||||
let all_selections =
|
||||
editor.update(cx, |editor, cx| editor.selections.display_ranges(cx));
|
||||
assert_eq!(
|
||||
all_selections.len(),
|
||||
1,
|
||||
"On next match, should deselect items and select the next match"
|
||||
);
|
||||
assert_ne!(
|
||||
all_selections, initial_selections,
|
||||
"Next match should be different from the first selection"
|
||||
);
|
||||
});
|
||||
search_bar.update(cx, |search_bar, _| {
|
||||
assert_eq!(
|
||||
search_bar.active_match_index,
|
||||
Some(1),
|
||||
"Match index should be updated to the next one"
|
||||
);
|
||||
});
|
||||
|
||||
search_bar.update(cx, |search_bar, cx| {
|
||||
search_bar.select_all_matches(&SelectAllMatches, cx);
|
||||
let all_selections =
|
||||
editor.update(cx, |editor, cx| editor.selections.display_ranges(cx));
|
||||
assert_eq!(
|
||||
all_selections.len(),
|
||||
expected_query_matches_count,
|
||||
"Should select all `a` characters in the buffer, but got: {all_selections:?}"
|
||||
);
|
||||
});
|
||||
search_bar.update(cx, |search_bar, _| {
|
||||
assert_eq!(
|
||||
search_bar.active_match_index,
|
||||
Some(1),
|
||||
"Match index should not change after selecting all matches"
|
||||
);
|
||||
});
|
||||
|
||||
search_bar.update(cx, |search_bar, cx| {
|
||||
search_bar.select_prev_match(&SelectPrevMatch, cx);
|
||||
let all_selections =
|
||||
editor.update(cx, |editor, cx| editor.selections.display_ranges(cx));
|
||||
assert_eq!(
|
||||
all_selections.len(),
|
||||
1,
|
||||
"On previous match, should deselect items and select the previous item"
|
||||
);
|
||||
assert_eq!(
|
||||
all_selections, initial_selections,
|
||||
"Previous match should be the same as the first selection"
|
||||
);
|
||||
});
|
||||
search_bar.update(cx, |search_bar, _| {
|
||||
assert_eq!(
|
||||
search_bar.active_match_index,
|
||||
Some(0),
|
||||
"Match index should be updated to the previous one"
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
|
|
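The test above pins down the selection invariant: select-all expands the editor to one selection per match without moving the active match index, while select-next/select-prev collapse back to a single selection and step the index. A tiny model of that state machine (sketch, simplified away from gpui):

// Sketch of the invariant exercised by test_search_select_all_matches.
#[derive(Debug, PartialEq)]
enum Selections {
    Single { active: usize },
    All { count: usize },
}

fn select_all(total: usize, active: usize) -> (Selections, usize) {
    // The active match index is left untouched.
    (Selections::All { count: total }, active)
}

fn select_next(total: usize, active: usize) -> (Selections, usize) {
    let next = (active + 1) % total;
    (Selections::Single { active: next }, next)
}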
@ -17,7 +17,8 @@ actions!(
|
|||
ToggleCaseSensitive,
|
||||
ToggleRegex,
|
||||
SelectNextMatch,
|
||||
SelectPrevMatch
|
||||
SelectPrevMatch,
|
||||
SelectAllMatches,
|
||||
]
|
||||
);
|
||||
|
||||
|
|
|
@ -198,7 +198,7 @@ impl TerminalLineHeight {
|
|||
match self {
|
||||
TerminalLineHeight::Comfortable => 1.618,
|
||||
TerminalLineHeight::Standard => 1.3,
|
||||
TerminalLineHeight::Custom(line_height) => *line_height,
|
||||
TerminalLineHeight::Custom(line_height) => f32::max(*line_height, 1.),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -908,6 +908,21 @@ impl Terminal {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn select_matches(&mut self, matches: Vec<RangeInclusive<Point>>) {
|
||||
let matches_to_select = self
|
||||
.matches
|
||||
.iter()
|
||||
.filter(|self_match| matches.contains(self_match))
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
for match_to_select in matches_to_select {
|
||||
self.set_selection(Some((
|
||||
make_selection(&match_to_select),
|
||||
*match_to_select.end(),
|
||||
)));
|
||||
}
|
||||
}
|
||||
|
||||
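select_matches above only re-selects ranges that are still present in the terminal's cached matches, so stale ranges from an earlier search are silently dropped. The core is an order-preserving intersection; a sketch:

// Sketch: keep the cached ranges that were requested, in cached order,
// mirroring Terminal::select_matches.
fn intersect_matches<T: PartialEq + Clone>(cached: &[T], requested: &[T]) -> Vec<T> {
    cached
        .iter()
        .filter(|m| requested.contains(*m))
        .cloned()
        .collect()
}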
fn set_selection(&mut self, selection: Option<(Selection, Point)>) {
|
||||
self.events
|
||||
.push_back(InternalEvent::SetSelection(selection));
|
||||
|
|
|
@ -221,6 +221,14 @@ impl TerminalPanel {
|
|||
pane::Event::ZoomIn => cx.emit(Event::ZoomIn),
|
||||
pane::Event::ZoomOut => cx.emit(Event::ZoomOut),
|
||||
pane::Event::Focus => cx.emit(Event::Focus),
|
||||
|
||||
pane::Event::AddItem { item } => {
|
||||
if let Some(workspace) = self.workspace.upgrade(cx) {
|
||||
let pane = self.pane.clone();
|
||||
workspace.update(cx, |workspace, cx| item.added_to_pane(workspace, pane, cx))
|
||||
}
|
||||
}
|
||||
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -275,7 +275,7 @@ impl TerminalView {
|
|||
cx.spawn(|this, mut cx| async move {
|
||||
Timer::after(CURSOR_BLINK_INTERVAL).await;
|
||||
this.update(&mut cx, |this, cx| this.resume_cursor_blinking(epoch, cx))
|
||||
.log_err();
|
||||
.ok();
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
@ -647,7 +647,11 @@ impl SearchableItem for TerminalView {
|
|||
}
|
||||
|
||||
/// Convert events raised by this item into search-relevant events (if applicable)
|
||||
fn to_search_event(event: &Self::Event) -> Option<SearchEvent> {
|
||||
fn to_search_event(
|
||||
&mut self,
|
||||
event: &Self::Event,
|
||||
_: &mut ViewContext<Self>,
|
||||
) -> Option<SearchEvent> {
|
||||
match event {
|
||||
Event::Wakeup => Some(SearchEvent::MatchesInvalidated),
|
||||
Event::SelectionsChanged => Some(SearchEvent::ActiveMatchChanged),
|
||||
|
@ -682,6 +686,13 @@ impl SearchableItem for TerminalView {
|
|||
cx.notify();
|
||||
}
|
||||
|
||||
/// Add selections for all matches given.
|
||||
fn select_matches(&mut self, matches: Vec<Self::Match>, cx: &mut ViewContext<Self>) {
|
||||
self.terminal()
|
||||
.update(cx, |term, _| term.select_matches(matches));
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
/// Get all of the matches for this query, should be done on the background
|
||||
fn find_matches(
|
||||
&mut self,
|
||||
|
@ -907,6 +918,7 @@ mod tests {
|
|||
let params = cx.update(AppState::test);
|
||||
cx.update(|cx| {
|
||||
theme::init((), cx);
|
||||
Project::init_settings(cx);
|
||||
language::init(cx);
|
||||
});
|
||||
|
||||
|
|
|
@ -379,6 +379,7 @@ pub struct Search {
|
|||
pub invalid_include_exclude_editor: ContainerStyle,
|
||||
pub include_exclude_inputs: ContainedText,
|
||||
pub option_button: Toggleable<Interactive<ContainedText>>,
|
||||
pub action_button: Interactive<ContainedText>,
|
||||
pub match_background: Color,
|
||||
pub match_index: ContainedText,
|
||||
pub results_status: TextStyle,
|
||||
|
@ -586,7 +587,7 @@ pub struct Picker {
|
|||
pub no_matches: ContainedLabel,
|
||||
pub item: Toggleable<Interactive<ContainedLabel>>,
|
||||
pub header: ContainedLabel,
|
||||
pub footer: ContainedLabel,
|
||||
pub footer: Interactive<ContainedLabel>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Default, JsonSchema)]
|
||||
|
|
|
@ -5,6 +5,7 @@ use parking_lot::Mutex;
|
|||
use serde::Deserialize;
|
||||
use serde_json::Value;
|
||||
use std::{
|
||||
borrow::Cow,
|
||||
collections::HashMap,
|
||||
sync::{
|
||||
atomic::{AtomicUsize, Ordering::SeqCst},
|
||||
|
@ -43,7 +44,7 @@ impl ThemeRegistry {
|
|||
this
|
||||
}
|
||||
|
||||
pub fn list(&self, staff: bool) -> impl Iterator<Item = ThemeMeta> + '_ {
|
||||
pub fn list_names(&self, staff: bool) -> impl Iterator<Item = Cow<str>> + '_ {
|
||||
let mut dirs = self.assets.list("themes/");
|
||||
|
||||
if !staff {
|
||||
|
@ -53,10 +54,21 @@ impl ThemeRegistry {
|
|||
.collect()
|
||||
}
|
||||
|
||||
dirs.into_iter().filter_map(|path| {
|
||||
let filename = path.strip_prefix("themes/")?;
|
||||
let theme_name = filename.strip_suffix(".json")?;
|
||||
self.get(theme_name).ok().map(|theme| theme.meta.clone())
|
||||
fn get_name(path: &str) -> Option<&str> {
|
||||
path.strip_prefix("themes/")?.strip_suffix(".json")
|
||||
}
|
||||
|
||||
dirs.into_iter().filter_map(|path| match path {
|
||||
Cow::Borrowed(path) => Some(Cow::Borrowed(get_name(path)?)),
|
||||
Cow::Owned(path) => Some(Cow::Owned(get_name(&path)?.to_string())),
|
||||
})
|
||||
}
|
||||
|
||||
pub fn list(&self, staff: bool) -> impl Iterator<Item = ThemeMeta> + '_ {
|
||||
self.list_names(staff).filter_map(|theme_name| {
|
||||
self.get(theme_name.as_ref())
|
||||
.ok()
|
||||
.map(|theme| theme.meta.clone())
|
||||
})
|
||||
}
|
||||
|
||||
|
|
|
@ -13,6 +13,7 @@ use std::sync::Arc;
|
|||
use util::ResultExt as _;
|
||||
|
||||
const MIN_FONT_SIZE: f32 = 6.0;
|
||||
const MIN_LINE_HEIGHT: f32 = 1.0;
|
||||
|
||||
#[derive(Clone, JsonSchema)]
|
||||
pub struct ThemeSettings {
|
||||
|
@ -20,6 +21,7 @@ pub struct ThemeSettings {
|
|||
pub buffer_font_features: fonts::Features,
|
||||
pub buffer_font_family: FamilyId,
|
||||
pub(crate) buffer_font_size: f32,
|
||||
pub(crate) buffer_line_height: BufferLineHeight,
|
||||
#[serde(skip)]
|
||||
pub theme: Arc<Theme>,
|
||||
}
|
||||
|
@ -33,11 +35,32 @@ pub struct ThemeSettingsContent {
|
|||
#[serde(default)]
|
||||
pub buffer_font_size: Option<f32>,
|
||||
#[serde(default)]
|
||||
pub buffer_line_height: Option<BufferLineHeight>,
|
||||
#[serde(default)]
|
||||
pub buffer_font_features: Option<fonts::Features>,
|
||||
#[serde(default)]
|
||||
pub theme: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, JsonSchema, Default)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum BufferLineHeight {
|
||||
#[default]
|
||||
Comfortable,
|
||||
Standard,
|
||||
Custom(f32),
|
||||
}
|
||||
|
||||
impl BufferLineHeight {
|
||||
pub fn value(&self) -> f32 {
|
||||
match self {
|
||||
BufferLineHeight::Comfortable => 1.618,
|
||||
BufferLineHeight::Standard => 1.3,
|
||||
BufferLineHeight::Custom(line_height) => *line_height,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ThemeSettings {
|
||||
pub fn buffer_font_size(&self, cx: &AppContext) -> f32 {
|
||||
if cx.has_global::<AdjustedBufferFontSize>() {
|
||||
|
@ -47,6 +70,10 @@ impl ThemeSettings {
|
|||
}
|
||||
.max(MIN_FONT_SIZE)
|
||||
}
|
||||
|
||||
pub fn line_height(&self) -> f32 {
|
||||
f32::max(self.buffer_line_height.value(), MIN_LINE_HEIGHT)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn adjusted_font_size(size: f32, cx: &AppContext) -> f32 {
|
||||
|
@ -106,6 +133,7 @@ impl settings::Setting for ThemeSettings {
|
|||
buffer_font_family_name: defaults.buffer_font_family.clone().unwrap(),
|
||||
buffer_font_features,
|
||||
buffer_font_size: defaults.buffer_font_size.unwrap(),
|
||||
buffer_line_height: defaults.buffer_line_height.unwrap(),
|
||||
theme: themes.get(defaults.theme.as_ref().unwrap()).unwrap(),
|
||||
};
|
||||
|
||||
|
@ -136,6 +164,7 @@ impl settings::Setting for ThemeSettings {
|
|||
}
|
||||
|
||||
merge(&mut this.buffer_font_size, value.buffer_font_size);
|
||||
merge(&mut this.buffer_line_height, value.buffer_line_height);
|
||||
}
|
||||
|
||||
Ok(this)
|
||||
|
@ -149,8 +178,8 @@ impl settings::Setting for ThemeSettings {
|
|||
let mut root_schema = generator.root_schema_for::<ThemeSettingsContent>();
|
||||
let theme_names = cx
|
||||
.global::<Arc<ThemeRegistry>>()
|
||||
.list(params.staff_mode)
|
||||
.map(|theme| Value::String(theme.name.clone()))
|
||||
.list_names(params.staff_mode)
|
||||
.map(|theme_name| Value::String(theme_name.to_string()))
|
||||
.collect();
|
||||
|
||||
let theme_name_schema = SchemaObject {
|
||||
|
|
|
@ -120,7 +120,7 @@ impl PickerDelegate for ThemeSelectorDelegate {
|
|||
self.matches.len()
|
||||
}
|
||||
|
||||
fn confirm(&mut self, cx: &mut ViewContext<ThemeSelector>) {
|
||||
fn confirm(&mut self, _: bool, cx: &mut ViewContext<ThemeSelector>) {
|
||||
self.selection_completed = true;
|
||||
|
||||
let theme_name = theme::current(cx).meta.name.clone();
|
||||
|
|
|
@ -6,6 +6,7 @@ lazy_static::lazy_static! {
|
|||
pub static ref HOME: PathBuf = dirs::home_dir().expect("failed to determine home directory");
|
||||
pub static ref CONFIG_DIR: PathBuf = HOME.join(".config").join("zed");
|
||||
pub static ref CONVERSATIONS_DIR: PathBuf = HOME.join(".config/zed/conversations");
|
||||
pub static ref EMBEDDINGS_DIR: PathBuf = HOME.join(".config/zed/embeddings");
|
||||
pub static ref LOGS_DIR: PathBuf = HOME.join("Library/Logs/Zed");
|
||||
pub static ref SUPPORT_DIR: PathBuf = HOME.join("Library/Application Support/Zed");
|
||||
pub static ref LANGUAGES_DIR: PathBuf = HOME.join("Library/Application Support/Zed/languages");
|
||||
|
|
16
crates/vcs_menu/Cargo.toml
Normal file
|
@ -0,0 +1,16 @@
|
|||
[package]
|
||||
name = "vcs_menu"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
fuzzy = {path = "../fuzzy"}
|
||||
gpui = {path = "../gpui"}
|
||||
picker = {path = "../picker"}
|
||||
util = {path = "../util"}
|
||||
theme = {path = "../theme"}
|
||||
workspace = {path = "../workspace"}
|
||||
|
||||
anyhow.workspace = true
|
|
@ -1,15 +1,22 @@
|
|||
use anyhow::{anyhow, bail};
|
||||
use anyhow::{anyhow, bail, Result};
|
||||
use fuzzy::{StringMatch, StringMatchCandidate};
|
||||
use gpui::{elements::*, AppContext, MouseState, Task, ViewContext, ViewHandle};
|
||||
use gpui::{
|
||||
actions,
|
||||
elements::*,
|
||||
platform::{CursorStyle, MouseButton},
|
||||
AppContext, MouseState, Task, ViewContext, ViewHandle,
|
||||
};
|
||||
use picker::{Picker, PickerDelegate, PickerEvent};
|
||||
use std::{ops::Not, sync::Arc};
|
||||
use util::ResultExt;
|
||||
use workspace::{Toast, Workspace};
|
||||
|
||||
actions!(branches, [OpenRecent]);
|
||||
|
||||
pub fn init(cx: &mut AppContext) {
|
||||
Picker::<BranchListDelegate>::init(cx);
|
||||
cx.add_async_action(toggle);
|
||||
}
|
||||
|
||||
pub type BranchList = Picker<BranchListDelegate>;
|
||||
|
||||
pub fn build_branch_list(
|
||||
|
@ -22,19 +29,60 @@ pub fn build_branch_list(
|
|||
workspace,
|
||||
selected_index: 0,
|
||||
last_query: String::default(),
|
||||
branch_name_trailoff_after: 29,
|
||||
},
|
||||
cx,
|
||||
)
|
||||
.with_theme(|theme| theme.picker.clone())
|
||||
}
|
||||
|
||||
fn toggle(
|
||||
_: &mut Workspace,
|
||||
_: &OpenRecent,
|
||||
cx: &mut ViewContext<Workspace>,
|
||||
) -> Option<Task<Result<()>>> {
|
||||
Some(cx.spawn(|workspace, mut cx| async move {
|
||||
workspace.update(&mut cx, |workspace, cx| {
|
||||
workspace.toggle_modal(cx, |_, cx| {
|
||||
let workspace = cx.handle();
|
||||
cx.add_view(|cx| {
|
||||
Picker::new(
|
||||
BranchListDelegate {
|
||||
matches: vec![],
|
||||
workspace,
|
||||
selected_index: 0,
|
||||
last_query: String::default(),
|
||||
// Modal branch picker has a longer trailoff than a popover one.
|
||||
branch_name_trailoff_after: 70,
|
||||
},
|
||||
cx,
|
||||
)
|
||||
.with_theme(|theme| theme.picker.clone())
|
||||
.with_max_size(800., 1200.)
|
||||
})
|
||||
});
|
||||
})?;
|
||||
Ok(())
|
||||
}))
|
||||
}
|
||||
|
||||
pub struct BranchListDelegate {
|
||||
matches: Vec<StringMatch>,
|
||||
workspace: ViewHandle<Workspace>,
|
||||
selected_index: usize,
|
||||
last_query: String,
|
||||
/// Max length of branch name before we truncate it and add a trailing `...`.
|
||||
branch_name_trailoff_after: usize,
|
||||
}
|
||||
|
||||
impl BranchListDelegate {
|
||||
fn display_error_toast(&self, message: String, cx: &mut ViewContext<BranchList>) {
|
||||
const GIT_CHECKOUT_FAILURE_ID: usize = 2048;
|
||||
self.workspace.update(cx, |model, ctx| {
|
||||
model.show_toast(Toast::new(GIT_CHECKOUT_FAILURE_ID, message), ctx)
|
||||
});
|
||||
}
|
||||
}
|
||||
impl PickerDelegate for BranchListDelegate {
|
||||
fn placeholder_text(&self) -> Arc<str> {
|
||||
"Select branch...".into()
|
||||
|
@ -58,12 +106,14 @@ impl PickerDelegate for BranchListDelegate {
|
|||
.read_with(&mut cx, |view, cx| {
|
||||
let delegate = view.delegate();
|
||||
let project = delegate.workspace.read(cx).project().read(&cx);
|
||||
let mut cwd =
|
||||
project
|
||||
|
||||
let Some(worktree) = project
|
||||
.visible_worktrees(cx)
|
||||
.next()
|
||||
.unwrap()
|
||||
.read(cx)
|
||||
else {
|
||||
bail!("Cannot update branch list as there are no visible worktrees")
|
||||
};
|
||||
let mut cwd = worktree.read(cx)
|
||||
.abs_path()
|
||||
.to_path_buf();
|
||||
cwd.push(".git");
|
||||
|
@ -132,44 +182,45 @@ impl PickerDelegate for BranchListDelegate {
|
|||
})
|
||||
}
|
||||
|
||||
fn confirm(&mut self, cx: &mut ViewContext<Picker<Self>>) {
|
||||
fn confirm(&mut self, _: bool, cx: &mut ViewContext<Picker<Self>>) {
|
||||
let current_pick = self.selected_index();
|
||||
let current_pick = self.matches[current_pick].string.clone();
|
||||
let Some(current_pick) = self.matches.get(current_pick).map(|pick| pick.string.clone()) else {
|
||||
return;
|
||||
};
|
||||
cx.spawn(|picker, mut cx| async move {
|
||||
picker.update(&mut cx, |this, cx| {
|
||||
let project = this.delegate().workspace.read(cx).project().read(cx);
|
||||
let mut cwd = project
|
||||
.visible_worktrees(cx)
|
||||
.next()
|
||||
.ok_or_else(|| anyhow!("There are no visisible worktrees."))?
|
||||
.read(cx)
|
||||
.abs_path()
|
||||
.to_path_buf();
|
||||
cwd.push(".git");
|
||||
let status = project
|
||||
.fs()
|
||||
.open_repo(&cwd)
|
||||
.ok_or_else(|| anyhow!("Could not open repository at path `{}`", cwd.as_os_str().to_string_lossy()))?
|
||||
.lock()
|
||||
.change_branch(¤t_pick);
|
||||
if status.is_err() {
|
||||
const GIT_CHECKOUT_FAILURE_ID: usize = 2048;
|
||||
this.delegate().workspace.update(cx, |model, ctx| {
|
||||
model.show_toast(
|
||||
Toast::new(
|
||||
GIT_CHECKOUT_FAILURE_ID,
|
||||
format!("Failed to checkout branch '{current_pick}', check for conflicts or unstashed files"),
|
||||
),
|
||||
ctx,
|
||||
)
|
||||
});
|
||||
status?;
|
||||
}
|
||||
cx.emit(PickerEvent::Dismiss);
|
||||
picker
|
||||
.update(&mut cx, |this, cx| {
|
||||
let project = this.delegate().workspace.read(cx).project().read(cx);
|
||||
let mut cwd = project
|
||||
.visible_worktrees(cx)
|
||||
.next()
|
||||
.ok_or_else(|| anyhow!("There are no visible worktrees."))?
|
||||
.read(cx)
|
||||
.abs_path()
|
||||
.to_path_buf();
|
||||
cwd.push(".git");
|
||||
let status = project
|
||||
.fs()
|
||||
.open_repo(&cwd)
|
||||
.ok_or_else(|| {
|
||||
anyhow!(
|
||||
"Could not open repository at path `{}`",
|
||||
cwd.as_os_str().to_string_lossy()
|
||||
)
|
||||
})?
|
||||
.lock()
|
||||
.change_branch(¤t_pick);
|
||||
if status.is_err() {
|
||||
this.delegate().display_error_toast(format!("Failed to checkout branch '{current_pick}', check for conflicts or unstashed files"), cx);
|
||||
status?;
|
||||
}
|
||||
cx.emit(PickerEvent::Dismiss);
|
||||
|
||||
Ok::<(), anyhow::Error>(())
|
||||
}).log_err();
|
||||
}).detach();
|
||||
Ok::<(), anyhow::Error>(())
|
||||
})
|
||||
.log_err();
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
fn dismissed(&mut self, cx: &mut ViewContext<Picker<Self>>) {
|
||||
|
@ -183,15 +234,15 @@ impl PickerDelegate for BranchListDelegate {
|
|||
selected: bool,
|
||||
cx: &gpui::AppContext,
|
||||
) -> AnyElement<Picker<Self>> {
|
||||
const DISPLAYED_MATCH_LEN: usize = 29;
|
||||
let theme = &theme::current(cx);
|
||||
let hit = &self.matches[ix];
|
||||
let shortened_branch_name = util::truncate_and_trailoff(&hit.string, DISPLAYED_MATCH_LEN);
|
||||
let shortened_branch_name =
|
||||
util::truncate_and_trailoff(&hit.string, self.branch_name_trailoff_after);
|
||||
let highlights = hit
|
||||
.positions
|
||||
.iter()
|
||||
.copied()
|
||||
.filter(|index| index < &DISPLAYED_MATCH_LEN)
|
||||
.filter(|index| index < &self.branch_name_trailoff_after)
|
||||
.collect();
|
||||
let style = theme.picker.item.in_state(selected).style_for(mouse_state);
|
||||
Flex::row()
|
||||
|
@ -235,4 +286,61 @@ impl PickerDelegate for BranchListDelegate {
|
|||
};
|
||||
Some(label.into_any())
|
||||
}
|
||||
fn render_footer(
|
||||
&self,
|
||||
cx: &mut ViewContext<Picker<Self>>,
|
||||
) -> Option<AnyElement<Picker<Self>>> {
|
||||
if !self.last_query.is_empty() {
|
||||
let theme = &theme::current(cx);
|
||||
let style = theme.picker.footer.clone();
|
||||
enum BranchCreateButton {}
|
||||
Some(
|
||||
Flex::row().with_child(MouseEventHandler::<BranchCreateButton, _>::new(0, cx, |state, _| {
|
||||
let style = style.style_for(state);
|
||||
Label::new("Create branch", style.label.clone())
|
||||
.contained()
|
||||
.with_style(style.container)
|
||||
})
|
||||
.with_cursor_style(CursorStyle::PointingHand)
|
||||
.on_down(MouseButton::Left, |_, _, cx| {
|
||||
cx.spawn(|picker, mut cx| async move {
|
||||
picker.update(&mut cx, |this, cx| {
|
||||
let project = this.delegate().workspace.read(cx).project().read(cx);
|
||||
let current_pick = &this.delegate().last_query;
|
||||
let mut cwd = project
|
||||
.visible_worktrees(cx)
|
||||
.next()
|
||||
.ok_or_else(|| anyhow!("There are no visible worktrees."))?
|
||||
.read(cx)
|
||||
.abs_path()
|
||||
.to_path_buf();
|
||||
cwd.push(".git");
|
||||
let repo = project
|
||||
.fs()
|
||||
.open_repo(&cwd)
|
||||
.ok_or_else(|| anyhow!("Could not open repository at path `{}`", cwd.as_os_str().to_string_lossy()))?;
|
||||
let repo = repo
|
||||
.lock();
|
||||
let status = repo
|
||||
.create_branch(¤t_pick);
|
||||
if status.is_err() {
|
||||
this.delegate().display_error_toast(format!("Failed to create branch '{current_pick}', check for conflicts or unstashed files"), cx);
|
||||
status?;
|
||||
}
|
||||
let status = repo.change_branch(¤t_pick);
|
||||
if status.is_err() {
|
||||
this.delegate().display_error_toast(format!("Failed to chec branch '{current_pick}', check for conflicts or unstashed files"), cx);
|
||||
status?;
|
||||
}
|
||||
cx.emit(PickerEvent::Dismiss);
|
||||
Ok::<(), anyhow::Error>(())
|
||||
})
|
||||
}).detach();
|
||||
})).aligned().right()
|
||||
.into_any(),
|
||||
)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
49
crates/vector_store/Cargo.toml
Normal file
|
@ -0,0 +1,49 @@
|
|||
[package]
|
||||
name = "vector_store"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/vector_store.rs"
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
gpui = { path = "../gpui" }
|
||||
language = { path = "../language" }
|
||||
project = { path = "../project" }
|
||||
workspace = { path = "../workspace" }
|
||||
util = { path = "../util" }
|
||||
picker = { path = "../picker" }
|
||||
theme = { path = "../theme" }
|
||||
editor = { path = "../editor" }
|
||||
rpc = { path = "../rpc" }
|
||||
settings = { path = "../settings" }
|
||||
anyhow.workspace = true
|
||||
futures.workspace = true
|
||||
smol.workspace = true
|
||||
rusqlite = { version = "0.27.0", features = ["blob", "array", "modern_sqlite"] }
|
||||
isahc.workspace = true
|
||||
log.workspace = true
|
||||
tree-sitter.workspace = true
|
||||
lazy_static.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
async-trait.workspace = true
|
||||
bincode = "1.3.3"
|
||||
matrixmultiply = "0.3.7"
|
||||
tiktoken-rs = "0.5.0"
|
||||
rand.workspace = true
|
||||
schemars.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
gpui = { path = "../gpui", features = ["test-support"] }
|
||||
language = { path = "../language", features = ["test-support"] }
|
||||
project = { path = "../project", features = ["test-support"] }
|
||||
rpc = { path = "../rpc", features = ["test-support"] }
|
||||
workspace = { path = "../workspace", features = ["test-support"] }
|
||||
settings = { path = "../settings", features = ["test-support"]}
|
||||
tree-sitter-rust = "*"
|
||||
rand.workspace = true
|
||||
unindent.workspace = true
|
||||
tempdir.workspace = true
|
31
crates/vector_store/README.md
Normal file
|
@ -0,0 +1,31 @@
|
|||
|
||||
WIP: Sample SQL Queries
|
||||
/*
|
||||
|
||||
create table "files" (
|
||||
"id" INTEGER PRIMARY KEY,
|
||||
"path" VARCHAR,
|
||||
"sha1" VARCHAR,
|
||||
);
|
||||
|
||||
create table symbols (
|
||||
"file_id" INTEGER REFERENCES("files", "id") ON CASCADE DELETE,
|
||||
"offset" INTEGER,
|
||||
"embedding" VECTOR,
|
||||
);
|
||||
|
||||
insert into "files" ("path", "sha1") values ("src/main.rs", "sha1") return id;
|
||||
insert into symbols (
|
||||
"file_id",
|
||||
"start",
|
||||
"end",
|
||||
"embedding"
|
||||
) values (
|
||||
(id,),
|
||||
(id,),
|
||||
(id,),
|
||||
(id,),
|
||||
)
|
||||
|
||||
|
||||
*/
|
325
crates/vector_store/src/db.rs
Normal file
|
@ -0,0 +1,325 @@
|
|||
use std::{
|
||||
cmp::Ordering,
|
||||
collections::HashMap,
|
||||
path::{Path, PathBuf},
|
||||
rc::Rc,
|
||||
time::SystemTime,
|
||||
};
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
|
||||
use crate::parsing::ParsedFile;
|
||||
use crate::VECTOR_STORE_VERSION;
|
||||
use rpc::proto::Timestamp;
|
||||
use rusqlite::{
|
||||
params,
|
||||
types::{FromSql, FromSqlResult, ValueRef},
|
||||
};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct FileRecord {
|
||||
pub id: usize,
|
||||
pub relative_path: String,
|
||||
pub mtime: Timestamp,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct Embedding(pub Vec<f32>);
|
||||
|
||||
impl FromSql for Embedding {
|
||||
fn column_result(value: ValueRef) -> FromSqlResult<Self> {
|
||||
let bytes = value.as_blob()?;
|
||||
let embedding: Result<Vec<f32>, Box<bincode::ErrorKind>> = bincode::deserialize(bytes);
|
||||
if embedding.is_err() {
|
||||
return Err(rusqlite::types::FromSqlError::Other(embedding.unwrap_err()));
|
||||
}
|
||||
return Ok(Embedding(embedding.unwrap()));
|
||||
}
|
||||
}
|
||||
|
||||
pub struct VectorDatabase {
|
||||
db: rusqlite::Connection,
|
||||
}
|
||||
|
||||
impl VectorDatabase {
|
||||
pub fn new(path: String) -> Result<Self> {
|
||||
let this = Self {
|
||||
db: rusqlite::Connection::open(path)?,
|
||||
};
|
||||
this.initialize_database()?;
|
||||
Ok(this)
|
||||
}
|
||||
|
||||
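VectorDatabase::new takes the database path as a plain String handed to rusqlite, so tests could point it at SQLite's standard in-memory database rather than a file (a usage sketch; the commit itself doesn't do this):

// Hypothetical test helper: ":memory:" is SQLite's throwaway in-memory DB.
fn open_for_tests() -> anyhow::Result<VectorDatabase> {
    VectorDatabase::new(":memory:".to_string())
}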
fn initialize_database(&self) -> Result<()> {
|
||||
rusqlite::vtab::array::load_module(&self.db)?;
|
||||
|
||||
// This will create the database if it doesn't exist.
|
||||
|
||||
// Initialize Vector Databasing Tables
|
||||
self.db.execute(
|
||||
"CREATE TABLE IF NOT EXISTS worktrees (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
absolute_path VARCHAR NOT NULL
|
||||
);
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS worktrees_absolute_path ON worktrees (absolute_path);
|
||||
",
|
||||
[],
|
||||
)?;
|
||||
|
||||
self.db.execute(
|
||||
"CREATE TABLE IF NOT EXISTS files (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
worktree_id INTEGER NOT NULL,
|
||||
relative_path VARCHAR NOT NULL,
|
||||
mtime_seconds INTEGER NOT NULL,
|
||||
mtime_nanos INTEGER NOT NULL,
|
||||
vector_store_version INTEGER NOT NULL,
|
||||
FOREIGN KEY(worktree_id) REFERENCES worktrees(id) ON DELETE CASCADE
|
||||
)",
|
||||
[],
|
||||
)?;
|
||||
|
||||
self.db.execute(
|
||||
"CREATE TABLE IF NOT EXISTS documents (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
file_id INTEGER NOT NULL,
|
||||
offset INTEGER NOT NULL,
|
||||
name VARCHAR NOT NULL,
|
||||
embedding BLOB NOT NULL,
|
||||
FOREIGN KEY(file_id) REFERENCES files(id) ON DELETE CASCADE
|
||||
)",
|
||||
[],
|
||||
)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
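The files and documents tables above rely on ON DELETE CASCADE, and SQLite only enforces foreign keys while the foreign_keys pragma is enabled on the connection; initialize_database doesn't set it, so a cautious caller might (sketch, not part of the commit):

// Sketch: enable FK enforcement so the CASCADE clauses above take effect.
fn enable_foreign_keys(db: &rusqlite::Connection) -> rusqlite::Result<()> {
    db.execute_batch("PRAGMA foreign_keys = ON;")
}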
pub fn delete_file(&self, worktree_id: i64, delete_path: PathBuf) -> Result<()> {
|
||||
self.db.execute(
|
||||
"DELETE FROM files WHERE worktree_id = ?1 AND relative_path = ?2",
|
||||
params![worktree_id, delete_path.to_str()],
|
||||
)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn insert_file(&self, worktree_id: i64, indexed_file: ParsedFile) -> Result<()> {
|
||||
// Write to files table, and return generated id.
|
||||
self.db.execute(
|
||||
"
|
||||
DELETE FROM files WHERE worktree_id = ?1 AND relative_path = ?2;
|
||||
",
|
||||
params![worktree_id, indexed_file.path.to_str()],
|
||||
)?;
|
||||
let mtime = Timestamp::from(indexed_file.mtime);
|
||||
self.db.execute(
|
||||
"
|
||||
INSERT INTO files
|
||||
(worktree_id, relative_path, mtime_seconds, mtime_nanos, vector_store_version)
|
||||
VALUES
|
||||
(?1, ?2, ?3, ?4, ?5);
|
||||
",
|
||||
params![
|
||||
worktree_id,
|
||||
indexed_file.path.to_str(),
|
||||
mtime.seconds,
|
||||
mtime.nanos,
|
||||
VECTOR_STORE_VERSION
|
||||
],
|
||||
)?;
|
||||
|
||||
let file_id = self.db.last_insert_rowid();
|
||||
|
||||
// Currently inserting at approximately 3400 documents a second
|
||||
// I imagine we can speed this up with a bulk insert of some kind.
|
||||
for document in indexed_file.documents {
|
||||
let embedding_blob = bincode::serialize(&document.embedding)?;
|
||||
|
||||
self.db.execute(
|
||||
"INSERT INTO documents (file_id, offset, name, embedding) VALUES (?1, ?2, ?3, ?4)",
|
||||
params![
|
||||
file_id,
|
||||
document.offset.to_string(),
|
||||
document.name,
|
||||
embedding_blob
|
||||
],
|
||||
)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn find_or_create_worktree(&self, worktree_root_path: &Path) -> Result<i64> {
|
||||
// Check whether a worktree with this absolute path already exists.
|
||||
let mut worktree_query = self
|
||||
.db
|
||||
.prepare("SELECT id FROM worktrees WHERE absolute_path = ?1")?;
|
||||
|
||||
let worktree_id = worktree_query
|
||||
.query_row(params![worktree_root_path.to_string_lossy()], |row| {
|
||||
Ok(row.get::<_, i64>(0)?)
|
||||
})
|
||||
.map_err(|err| anyhow!(err));
|
||||
|
||||
if worktree_id.is_ok() {
|
||||
return worktree_id;
|
||||
}
|
||||
|
||||
// If worktree_id is Err, insert new worktree
|
||||
self.db.execute(
|
||||
"
|
||||
INSERT into worktrees (absolute_path) VALUES (?1)
|
||||
",
|
||||
params![worktree_root_path.to_string_lossy()],
|
||||
)?;
|
||||
Ok(self.db.last_insert_rowid())
|
||||
}
|
||||
|
||||
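find_or_create_worktree issues a SELECT and falls back to an INSERT. Because worktrees_absolute_path is a UNIQUE index, the same get-or-create could be collapsed into an upsert plus lookup (an alternative sketch, not what the commit does):

// Sketch: single upsert relying on the UNIQUE index created above.
fn find_or_create_worktree_upsert(
    db: &rusqlite::Connection,
    absolute_path: &str,
) -> rusqlite::Result<i64> {
    db.execute(
        "INSERT INTO worktrees (absolute_path) VALUES (?1)
         ON CONFLICT (absolute_path) DO NOTHING",
        rusqlite::params![absolute_path],
    )?;
    db.query_row(
        "SELECT id FROM worktrees WHERE absolute_path = ?1",
        rusqlite::params![absolute_path],
        |row| row.get(0),
    )
}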
pub fn get_file_mtimes(&self, worktree_id: i64) -> Result<HashMap<PathBuf, SystemTime>> {
|
||||
let mut statement = self.db.prepare(
|
||||
"
|
||||
SELECT relative_path, mtime_seconds, mtime_nanos
|
||||
FROM files
|
||||
WHERE worktree_id = ?1
|
||||
ORDER BY relative_path",
|
||||
)?;
|
||||
let mut result: HashMap<PathBuf, SystemTime> = HashMap::new();
|
||||
for row in statement.query_map(params![worktree_id], |row| {
|
||||
Ok((
|
||||
row.get::<_, String>(0)?.into(),
|
||||
Timestamp {
|
||||
seconds: row.get(1)?,
|
||||
nanos: row.get(2)?,
|
||||
}
|
||||
.into(),
|
||||
))
|
||||
})? {
|
||||
let row = row?;
|
||||
result.insert(row.0, row.1);
|
||||
}
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
pub fn top_k_search(
|
||||
&self,
|
||||
worktree_ids: &[i64],
|
||||
query_embedding: &Vec<f32>,
|
||||
limit: usize,
|
||||
) -> Result<Vec<(i64, PathBuf, usize, String)>> {
|
||||
let mut results = Vec::<(i64, f32)>::with_capacity(limit + 1);
|
||||
self.for_each_document(&worktree_ids, |id, embedding| {
|
||||
let similarity = dot(&embedding, &query_embedding);
|
||||
let ix = match results
|
||||
.binary_search_by(|(_, s)| similarity.partial_cmp(&s).unwrap_or(Ordering::Equal))
|
||||
{
|
||||
Ok(ix) => ix,
|
||||
Err(ix) => ix,
|
||||
};
|
||||
results.insert(ix, (id, similarity));
|
||||
results.truncate(limit);
|
||||
})?;
|
||||
|
||||
let ids = results.into_iter().map(|(id, _)| id).collect::<Vec<_>>();
|
||||
self.get_documents_by_ids(&ids)
|
||||
}
|
||||
|
||||
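top_k_search above maintains the best `limit` documents by binary-searching the descending-by-similarity results and truncating after each insert. The same bounded insert in isolation:

use std::cmp::Ordering;

// Sketch of the bounded top-k insert used by top_k_search.
fn push_top_k(results: &mut Vec<(i64, f32)>, id: i64, similarity: f32, limit: usize) {
    let ix = results
        .binary_search_by(|(_, s)| similarity.partial_cmp(s).unwrap_or(Ordering::Equal))
        .unwrap_or_else(|ix| ix);
    results.insert(ix, (id, similarity));
    results.truncate(limit);
}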
fn for_each_document(
|
||||
&self,
|
||||
worktree_ids: &[i64],
|
||||
mut f: impl FnMut(i64, Vec<f32>),
|
||||
) -> Result<()> {
|
||||
let mut query_statement = self.db.prepare(
|
||||
"
|
||||
SELECT
|
||||
documents.id, documents.embedding
|
||||
FROM
|
||||
documents, files
|
||||
WHERE
|
||||
documents.file_id = files.id AND
|
||||
files.worktree_id IN rarray(?)
|
||||
",
|
||||
)?;
|
||||
|
||||
query_statement
|
||||
.query_map(params![ids_to_sql(worktree_ids)], |row| {
|
||||
Ok((row.get(0)?, row.get::<_, Embedding>(1)?))
|
||||
})?
|
||||
.filter_map(|row| row.ok())
|
||||
.for_each(|(id, embedding)| f(id, embedding.0));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get_documents_by_ids(&self, ids: &[i64]) -> Result<Vec<(i64, PathBuf, usize, String)>> {
|
||||
let mut statement = self.db.prepare(
|
||||
"
|
||||
SELECT
|
||||
documents.id, files.worktree_id, files.relative_path, documents.offset, documents.name
|
||||
FROM
|
||||
documents, files
|
||||
WHERE
|
||||
documents.file_id = files.id AND
|
||||
documents.id in rarray(?)
|
||||
",
|
||||
)?;
|
||||
|
||||
let result_iter = statement.query_map(params![ids_to_sql(ids)], |row| {
|
||||
Ok((
|
||||
row.get::<_, i64>(0)?,
|
||||
row.get::<_, i64>(1)?,
|
||||
row.get::<_, String>(2)?.into(),
|
||||
row.get(3)?,
|
||||
row.get(4)?,
|
||||
))
|
||||
})?;
|
||||
|
||||
let mut values_by_id = HashMap::<i64, (i64, PathBuf, usize, String)>::default();
|
||||
for row in result_iter {
|
||||
let (id, worktree_id, path, offset, name) = row?;
|
||||
values_by_id.insert(id, (worktree_id, path, offset, name));
|
||||
}
|
||||
|
||||
let mut results = Vec::with_capacity(ids.len());
|
||||
for id in ids {
|
||||
let value = values_by_id
|
||||
.remove(id)
|
||||
.ok_or(anyhow!("missing document id {}", id))?;
|
||||
results.push(value);
|
||||
}
|
||||
|
||||
Ok(results)
|
||||
}
|
||||
}
|
||||
|
||||
fn ids_to_sql(ids: &[i64]) -> Rc<Vec<rusqlite::types::Value>> {
|
||||
Rc::new(
|
||||
ids.iter()
|
||||
.copied()
|
||||
.map(|v| rusqlite::types::Value::from(v))
|
||||
.collect::<Vec<_>>(),
|
||||
)
|
||||
}
|
||||
|
||||
pub(crate) fn dot(vec_a: &[f32], vec_b: &[f32]) -> f32 {
|
||||
let len = vec_a.len();
|
||||
assert_eq!(len, vec_b.len());
|
||||
|
||||
let mut result = 0.0;
|
||||
unsafe {
|
||||
matrixmultiply::sgemm(
|
||||
1,
|
||||
len,
|
||||
1,
|
||||
1.0,
|
||||
vec_a.as_ptr(),
|
||||
len as isize,
|
||||
1,
|
||||
vec_b.as_ptr(),
|
||||
1,
|
||||
len as isize,
|
||||
0.0,
|
||||
&mut result as *mut f32,
|
||||
1,
|
||||
1,
|
||||
);
|
||||
}
|
||||
result
|
||||
}
|
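dot above drives matrixmultiply::sgemm as a 1×len by len×1 product to get a single scalar. A quick sanity check against the naive sum (test sketch):

// Sketch: the sgemm-based dot should agree with the naive loop.
fn dot_naive(a: &[f32], b: &[f32]) -> f32 {
    assert_eq!(a.len(), b.len());
    a.iter().zip(b).map(|(x, y)| x * y).sum()
}

#[test]
fn dot_matches_naive() {
    let a = [1.0_f32, 2.0, 3.0];
    let b = [4.0_f32, 5.0, 6.0];
    assert!((dot(&a, &b) - dot_naive(&a, &b)).abs() < 1e-5);
}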
166
crates/vector_store/src/embedding.rs
Normal file
|
@ -0,0 +1,166 @@
|
|||
use anyhow::{anyhow, Result};
use async_trait::async_trait;
use futures::AsyncReadExt;
use gpui::executor::Background;
use gpui::serde_json;
use isahc::http::StatusCode;
use isahc::prelude::Configurable;
use isahc::{AsyncBody, Response};
use lazy_static::lazy_static;
use serde::{Deserialize, Serialize};
use std::env;
use std::sync::Arc;
use std::time::Duration;
use tiktoken_rs::{cl100k_base, CoreBPE};
use util::http::{HttpClient, Request};

lazy_static! {
    static ref OPENAI_API_KEY: Option<String> = env::var("OPENAI_API_KEY").ok();
    static ref OPENAI_BPE_TOKENIZER: CoreBPE = cl100k_base().unwrap();
}

#[derive(Clone)]
pub struct OpenAIEmbeddings {
    pub client: Arc<dyn HttpClient>,
    pub executor: Arc<Background>,
}

#[derive(Serialize)]
struct OpenAIEmbeddingRequest<'a> {
    model: &'static str,
    input: Vec<&'a str>,
}

#[derive(Deserialize)]
struct OpenAIEmbeddingResponse {
    data: Vec<OpenAIEmbedding>,
    usage: OpenAIEmbeddingUsage,
}

#[derive(Debug, Deserialize)]
struct OpenAIEmbedding {
    embedding: Vec<f32>,
    index: usize,
    object: String,
}

#[derive(Deserialize)]
struct OpenAIEmbeddingUsage {
    prompt_tokens: usize,
    total_tokens: usize,
}

#[async_trait]
pub trait EmbeddingProvider: Sync + Send {
    async fn embed_batch(&self, spans: Vec<&str>) -> Result<Vec<Vec<f32>>>;
}

pub struct DummyEmbeddings {}

#[async_trait]
impl EmbeddingProvider for DummyEmbeddings {
    async fn embed_batch(&self, spans: Vec<&str>) -> Result<Vec<Vec<f32>>> {
        // 1536 is the embedding size for OpenAI's ada-002 model,
        // the model we will likely be starting with.
        let dummy_vec = vec![0.32 as f32; 1536];
        return Ok(vec![dummy_vec; spans.len()]);
    }
}

impl OpenAIEmbeddings {
    async fn truncate(span: String) -> String {
        let mut tokens = OPENAI_BPE_TOKENIZER.encode_with_special_tokens(span.as_ref());
        if tokens.len() > 8190 {
            tokens.truncate(8190);
            let result = OPENAI_BPE_TOKENIZER.decode(tokens.clone());
            if result.is_ok() {
                let transformed = result.unwrap();
                // assert_ne!(transformed, span);
                return transformed;
            }
        }

        return span.to_string();
    }

    async fn send_request(&self, api_key: &str, spans: Vec<&str>) -> Result<Response<AsyncBody>> {
        let request = Request::post("https://api.openai.com/v1/embeddings")
            .redirect_policy(isahc::config::RedirectPolicy::Follow)
            .header("Content-Type", "application/json")
            .header("Authorization", format!("Bearer {}", api_key))
            .body(
                serde_json::to_string(&OpenAIEmbeddingRequest {
                    input: spans.clone(),
                    model: "text-embedding-ada-002",
                })
                .unwrap()
                .into(),
            )?;

        Ok(self.client.send(request).await?)
    }
}

#[async_trait]
impl EmbeddingProvider for OpenAIEmbeddings {
    async fn embed_batch(&self, spans: Vec<&str>) -> Result<Vec<Vec<f32>>> {
        const BACKOFF_SECONDS: [usize; 3] = [65, 180, 360];
        const MAX_RETRIES: usize = 3;

        let api_key = OPENAI_API_KEY
            .as_ref()
            .ok_or_else(|| anyhow!("no api key"))?;

        let mut request_number = 0;
        let mut response: Response<AsyncBody>;
        let mut spans: Vec<String> = spans.iter().map(|x| x.to_string()).collect();
        while request_number < MAX_RETRIES {
            response = self
                .send_request(api_key, spans.iter().map(|x| &**x).collect())
                .await?;
            request_number += 1;

            if request_number + 1 == MAX_RETRIES && response.status() != StatusCode::OK {
                return Err(anyhow!(
                    "openai max retries, error: {:?}",
                    &response.status()
                ));
            }

            match response.status() {
                StatusCode::TOO_MANY_REQUESTS => {
                    let delay = Duration::from_secs(BACKOFF_SECONDS[request_number - 1] as u64);
                    self.executor.timer(delay).await;
                }
                StatusCode::BAD_REQUEST => {
                    log::info!("BAD REQUEST: {:?}", &response.status());
                    // Don't worry about delaying a bad request, as we can assume
                    // we haven't been rate limited yet.
                    for span in spans.iter_mut() {
                        *span = Self::truncate(span.to_string()).await;
                    }
                }
                StatusCode::OK => {
                    let mut body = String::new();
                    response.body_mut().read_to_string(&mut body).await?;
                    let response: OpenAIEmbeddingResponse = serde_json::from_str(&body)?;

                    log::info!(
                        "openai embedding completed. tokens: {:?}",
                        response.usage.total_tokens
                    );
                    return Ok(response
                        .data
                        .into_iter()
                        .map(|embedding| embedding.embedding)
                        .collect());
                }
                _ => {
                    return Err(anyhow!("openai embedding failed {}", response.status()));
                }
            }
        }

        Err(anyhow!("openai embedding failed"))
    }
}
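Editor's note: `truncate` guards against the model's ~8k-token input limit by encoding with the `cl100k_base` BPE, clamping the token vector, and decoding it back to a string. A standalone sketch of the same idea, assuming only the `tiktoken-rs` crate (the 8190 budget mirrors the code above):

```rust
// Sketch: clamp a span to a token budget; fall back to the original span
// if decoding fails after the cut, as the method above does.
use tiktoken_rs::cl100k_base;

fn truncate_to_tokens(span: &str, max_tokens: usize) -> String {
    let bpe = cl100k_base().unwrap();
    let mut tokens = bpe.encode_with_special_tokens(span);
    if tokens.len() <= max_tokens {
        return span.to_string();
    }
    tokens.truncate(max_tokens);
    bpe.decode(tokens).unwrap_or_else(|_| span.to_string())
}

fn main() {
    let long_input = "fn main() {}\n".repeat(10_000);
    let clipped = truncate_to_tokens(&long_input, 8190);
    assert!(clipped.len() < long_input.len());
}
```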

172
crates/vector_store/src/modal.rs
Normal file

@ -0,0 +1,172 @@
use crate::{SearchResult, VectorStore};
use editor::{scroll::autoscroll::Autoscroll, Editor};
use gpui::{
    actions, elements::*, AnyElement, AppContext, ModelHandle, MouseState, Task, ViewContext,
    WeakViewHandle,
};
use picker::{Picker, PickerDelegate, PickerEvent};
use project::{Project, ProjectPath};
use std::{collections::HashMap, sync::Arc, time::Duration};
use util::ResultExt;
use workspace::Workspace;

const MIN_QUERY_LEN: usize = 5;
const EMBEDDING_DEBOUNCE_INTERVAL: Duration = Duration::from_millis(500);

actions!(semantic_search, [Toggle]);

pub type SemanticSearch = Picker<SemanticSearchDelegate>;

pub struct SemanticSearchDelegate {
    workspace: WeakViewHandle<Workspace>,
    project: ModelHandle<Project>,
    vector_store: ModelHandle<VectorStore>,
    selected_match_index: usize,
    matches: Vec<SearchResult>,
    history: HashMap<String, Vec<SearchResult>>,
}

impl SemanticSearchDelegate {
    // This currently searches on every keystroke, which is wildly overkill
    // and has the potential to get expensive. We will need to throttle searching.
    pub fn new(
        workspace: WeakViewHandle<Workspace>,
        project: ModelHandle<Project>,
        vector_store: ModelHandle<VectorStore>,
    ) -> Self {
        Self {
            workspace,
            project,
            vector_store,
            selected_match_index: 0,
            matches: vec![],
            history: HashMap::new(),
        }
    }
}

impl PickerDelegate for SemanticSearchDelegate {
    fn placeholder_text(&self) -> Arc<str> {
        "Search repository in natural language...".into()
    }

    fn confirm(&mut self, _: bool, cx: &mut ViewContext<SemanticSearch>) {
        if let Some(search_result) = self.matches.get(self.selected_match_index) {
            // Open the buffer containing this search result.
            let search_result = search_result.clone();
            let buffer = self.project.update(cx, |project, cx| {
                project.open_buffer(
                    ProjectPath {
                        worktree_id: search_result.worktree_id,
                        path: search_result.file_path.clone().into(),
                    },
                    cx,
                )
            });

            let workspace = self.workspace.clone();
            let position = search_result.clone().offset;
            cx.spawn(|_, mut cx| async move {
                let buffer = buffer.await?;
                workspace.update(&mut cx, |workspace, cx| {
                    let editor = workspace.open_project_item::<Editor>(buffer, cx);
                    editor.update(cx, |editor, cx| {
                        editor.change_selections(Some(Autoscroll::center()), cx, |s| {
                            s.select_ranges([position..position])
                        });
                    });
                })?;
                Ok::<_, anyhow::Error>(())
            })
            .detach_and_log_err(cx);
            cx.emit(PickerEvent::Dismiss);
        }
    }

    fn dismissed(&mut self, _cx: &mut ViewContext<SemanticSearch>) {}

    fn match_count(&self) -> usize {
        self.matches.len()
    }

    fn selected_index(&self) -> usize {
        self.selected_match_index
    }

    fn set_selected_index(&mut self, ix: usize, _cx: &mut ViewContext<SemanticSearch>) {
        self.selected_match_index = ix;
    }

    fn update_matches(&mut self, query: String, cx: &mut ViewContext<SemanticSearch>) -> Task<()> {
        log::info!("Searching for {:?}...", query);
        if query.len() < MIN_QUERY_LEN {
            log::info!("Query below minimum length");
            return Task::ready(());
        }

        let vector_store = self.vector_store.clone();
        let project = self.project.clone();
        cx.spawn(|this, mut cx| async move {
            cx.background().timer(EMBEDDING_DEBOUNCE_INTERVAL).await;

            let retrieved_cached = this.update(&mut cx, |this, _| {
                let delegate = this.delegate_mut();
                if delegate.history.contains_key(&query) {
                    let historic_results = delegate.history.get(&query).unwrap().to_owned();
                    delegate.matches = historic_results.clone();
                    true
                } else {
                    false
                }
            });

            if let Some(retrieved) = retrieved_cached.log_err() {
                if !retrieved {
                    let task = vector_store.update(&mut cx, |store, cx| {
                        store.search(project.clone(), query.to_string(), 10, cx)
                    });

                    if let Some(results) = task.await.log_err() {
                        log::info!("Not queried previously, searching...");
                        this.update(&mut cx, |this, _| {
                            let delegate = this.delegate_mut();
                            delegate.matches = results.clone();
                            delegate.history.insert(query, results);
                        })
                        .ok();
                    }
                } else {
                    log::info!("Already queried, retrieved directly from cached history");
                }
            }
        })
    }

    fn render_match(
        &self,
        ix: usize,
        mouse_state: &mut MouseState,
        selected: bool,
        cx: &AppContext,
    ) -> AnyElement<Picker<Self>> {
        let theme = theme::current(cx);
        let style = &theme.picker.item;
        let current_style = style.in_state(selected).style_for(mouse_state);

        let search_result = &self.matches[ix];

        let path = search_result.file_path.to_string_lossy();
        let name = search_result.name.clone();

        Flex::column()
            .with_child(Text::new(name, current_style.label.text.clone()).with_soft_wrap(false))
            .with_child(Label::new(
                path.to_string(),
                style.inactive_state().default.label.clone(),
            ))
            .contained()
            .with_style(current_style.container)
            .into_any()
    }
}
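Editor's note: `update_matches` debounces by sleeping for `EMBEDDING_DEBOUNCE_INTERVAL` before doing any work, then consults the per-query `history` cache so a repeated query never re-embeds or re-searches. The same pattern in miniature, as a sketch assuming only `smol` (the function name and stand-in results are illustrative):

```rust
// Sketch: debounce, then serve from cache, otherwise run the expensive search.
use std::collections::HashMap;
use std::time::Duration;

async fn search_debounced(
    query: String,
    history: &mut HashMap<String, Vec<String>>,
) -> Vec<String> {
    smol::Timer::after(Duration::from_millis(500)).await; // debounce interval
    if let Some(cached) = history.get(&query) {
        return cached.clone(); // cache hit: no embedding request issued
    }
    let results = vec![format!("result for {query}")]; // stand-in for store.search
    history.insert(query, results.clone());
    results
}

fn main() {
    smol::block_on(async {
        let mut history = HashMap::new();
        let first = search_debounced("read file".into(), &mut history).await;
        let second = search_debounced("read file".into(), &mut history).await;
        assert_eq!(first, second);
    });
}
```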

118
crates/vector_store/src/parsing.rs
Normal file

@ -0,0 +1,118 @@
use std::{path::PathBuf, sync::Arc, time::SystemTime};

use anyhow::{anyhow, Ok, Result};
use project::Fs;
use tree_sitter::{Parser, QueryCursor};

use crate::PendingFile;

#[derive(Debug, PartialEq, Clone)]
pub struct Document {
    pub offset: usize,
    pub name: String,
    pub embedding: Vec<f32>,
}

#[derive(Debug, PartialEq, Clone)]
pub struct ParsedFile {
    pub path: PathBuf,
    pub mtime: SystemTime,
    pub documents: Vec<Document>,
}

const CODE_CONTEXT_TEMPLATE: &str =
    "The below code snippet is from file '<path>'\n\n```<language>\n<item>\n```";

pub struct CodeContextRetriever {
    pub parser: Parser,
    pub cursor: QueryCursor,
    pub fs: Arc<dyn Fs>,
}

impl CodeContextRetriever {
    pub async fn parse_file(
        &mut self,
        pending_file: PendingFile,
    ) -> Result<(ParsedFile, Vec<String>)> {
        let grammar = pending_file
            .language
            .grammar()
            .ok_or_else(|| anyhow!("no grammar for language"))?;
        let embedding_config = grammar
            .embedding_config
            .as_ref()
            .ok_or_else(|| anyhow!("no embedding queries"))?;

        let content = self.fs.load(&pending_file.absolute_path).await?;

        self.parser.set_language(grammar.ts_language).unwrap();

        let tree = self
            .parser
            .parse(&content, None)
            .ok_or_else(|| anyhow!("parsing failed"))?;

        let mut documents = Vec::new();
        let mut context_spans = Vec::new();

        // Iterate through query matches
        for mat in self.cursor.matches(
            &embedding_config.query,
            tree.root_node(),
            content.as_bytes(),
        ) {
            // log::info!("-----MATCH-----");

            let mut name: Vec<&str> = vec![];
            let mut item: Option<&str> = None;
            let mut offset: Option<usize> = None;
            for capture in mat.captures {
                if capture.index == embedding_config.item_capture_ix {
                    offset = Some(capture.node.byte_range().start);
                    item = content.get(capture.node.byte_range());
                } else if capture.index == embedding_config.name_capture_ix {
                    if let Some(name_content) = content.get(capture.node.byte_range()) {
                        name.push(name_content);
                    }
                }

                if let Some(context_capture_ix) = embedding_config.context_capture_ix {
                    if capture.index == context_capture_ix {
                        if let Some(context) = content.get(capture.node.byte_range()) {
                            name.push(context);
                        }
                    }
                }
            }

            if item.is_some() && offset.is_some() && name.len() > 0 {
                let context_span = CODE_CONTEXT_TEMPLATE
                    .replace("<path>", pending_file.relative_path.to_str().unwrap())
                    .replace("<language>", &pending_file.language.name().to_lowercase())
                    .replace("<item>", item.unwrap());

                let mut truncated_span = context_span.clone();
                truncated_span.truncate(100);

                // log::info!("Name: {:?}", name);
                // log::info!("Span: {:?}", truncated_span);

                context_spans.push(context_span);
                documents.push(Document {
                    name: name.join(" "),
                    offset: offset.unwrap(),
                    embedding: Vec::new(),
                })
            }
        }

        return Ok((
            ParsedFile {
                path: pending_file.relative_path,
                mtime: pending_file.modified_time,
                documents,
            },
            context_spans,
        ));
    }
}
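Editor's note: the `@name`/`@item` captures consumed above come from a language's embedding query. A self-contained sketch of the same mechanics, assuming `tree-sitter` and `tree-sitter-rust` as direct dependencies and reusing the Rust query that also appears in the tests below:

```rust
// Sketch: run an embedding-style query over a source string and print the
// captured @name identifiers, mirroring what parse_file does above.
use tree_sitter::{Parser, Query, QueryCursor};

fn main() {
    let language = tree_sitter_rust::language();
    let mut parser = Parser::new();
    parser.set_language(language).unwrap();

    let source = "fn aaa() { println!(\"hi\"); }";
    let tree = parser.parse(source, None).unwrap();

    let query = Query::new(
        language,
        "(function_item name: (identifier) @name body: (block)) @item",
    )
    .unwrap();

    let name_ix = query.capture_index_for_name("name").unwrap();
    let mut cursor = QueryCursor::new();
    for mat in cursor.matches(&query, tree.root_node(), source.as_bytes()) {
        for capture in mat.captures {
            if capture.index == name_ix {
                println!("name: {}", &source[capture.node.byte_range()]);
            }
        }
    }
}
```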

770
crates/vector_store/src/vector_store.rs
Normal file

@ -0,0 +1,770 @@
mod db;
mod embedding;
mod modal;
mod parsing;
mod vector_store_settings;

#[cfg(test)]
mod vector_store_tests;

use crate::vector_store_settings::VectorStoreSettings;
use anyhow::{anyhow, Result};
use db::VectorDatabase;
use embedding::{EmbeddingProvider, OpenAIEmbeddings};
use futures::{channel::oneshot, Future};
use gpui::{
    AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Task, ViewContext,
    WeakModelHandle,
};
use language::{Language, LanguageRegistry};
use modal::{SemanticSearch, SemanticSearchDelegate, Toggle};
use parsing::{CodeContextRetriever, ParsedFile};
use project::{Fs, PathChange, Project, ProjectEntryId, WorktreeId};
use smol::channel;
use std::{
    collections::HashMap,
    path::{Path, PathBuf},
    sync::Arc,
    time::{Duration, Instant, SystemTime},
};
use tree_sitter::{Parser, QueryCursor};
use util::{
    channel::{ReleaseChannel, RELEASE_CHANNEL, RELEASE_CHANNEL_NAME},
    http::HttpClient,
    paths::EMBEDDINGS_DIR,
    ResultExt,
};
use workspace::{Workspace, WorkspaceCreated};

const VECTOR_STORE_VERSION: usize = 0;
const EMBEDDINGS_BATCH_SIZE: usize = 150;

pub fn init(
    fs: Arc<dyn Fs>,
    http_client: Arc<dyn HttpClient>,
    language_registry: Arc<LanguageRegistry>,
    cx: &mut AppContext,
) {
    settings::register::<VectorStoreSettings>(cx);

    let db_file_path = EMBEDDINGS_DIR
        .join(Path::new(RELEASE_CHANNEL_NAME.as_str()))
        .join("embeddings_db");

    SemanticSearch::init(cx);
    cx.add_action(
        |workspace: &mut Workspace, _: &Toggle, cx: &mut ViewContext<Workspace>| {
            if cx.has_global::<ModelHandle<VectorStore>>() {
                let vector_store = cx.global::<ModelHandle<VectorStore>>().clone();
                workspace.toggle_modal(cx, |workspace, cx| {
                    let project = workspace.project().clone();
                    let workspace = cx.weak_handle();
                    cx.add_view(|cx| {
                        SemanticSearch::new(
                            SemanticSearchDelegate::new(workspace, project, vector_store),
                            cx,
                        )
                    })
                });
            }
        },
    );

    if *RELEASE_CHANNEL == ReleaseChannel::Stable
        || !settings::get::<VectorStoreSettings>(cx).enabled
    {
        return;
    }

    cx.spawn(move |mut cx| async move {
        let vector_store = VectorStore::new(
            fs,
            db_file_path,
            // Arc::new(embedding::DummyEmbeddings {}),
            Arc::new(OpenAIEmbeddings {
                client: http_client,
                executor: cx.background(),
            }),
            language_registry,
            cx.clone(),
        )
        .await?;

        cx.update(|cx| {
            cx.set_global(vector_store.clone());
            cx.subscribe_global::<WorkspaceCreated, _>({
                let vector_store = vector_store.clone();
                move |event, cx| {
                    let workspace = &event.0;
                    if let Some(workspace) = workspace.upgrade(cx) {
                        let project = workspace.read(cx).project().clone();
                        if project.read(cx).is_local() {
                            vector_store.update(cx, |store, cx| {
                                store.add_project(project, cx).detach();
                            });
                        }
                    }
                }
            })
            .detach();
        });

        anyhow::Ok(())
    })
    .detach();
}

pub struct VectorStore {
    fs: Arc<dyn Fs>,
    database_url: Arc<PathBuf>,
    embedding_provider: Arc<dyn EmbeddingProvider>,
    language_registry: Arc<LanguageRegistry>,
    db_update_tx: channel::Sender<DbOperation>,
    parsing_files_tx: channel::Sender<PendingFile>,
    _db_update_task: Task<()>,
    _embed_batch_task: Task<()>,
    _batch_files_task: Task<()>,
    _parsing_files_tasks: Vec<Task<()>>,
    projects: HashMap<WeakModelHandle<Project>, ProjectState>,
}

struct ProjectState {
    worktree_db_ids: Vec<(WorktreeId, i64)>,
    pending_files: HashMap<PathBuf, (PendingFile, SystemTime)>,
    _subscription: gpui::Subscription,
}

impl ProjectState {
    fn db_id_for_worktree_id(&self, id: WorktreeId) -> Option<i64> {
        self.worktree_db_ids
            .iter()
            .find_map(|(worktree_id, db_id)| {
                if *worktree_id == id {
                    Some(*db_id)
                } else {
                    None
                }
            })
    }

    fn worktree_id_for_db_id(&self, id: i64) -> Option<WorktreeId> {
        self.worktree_db_ids
            .iter()
            .find_map(|(worktree_id, db_id)| {
                if *db_id == id {
                    Some(*worktree_id)
                } else {
                    None
                }
            })
    }

    fn update_pending_files(&mut self, pending_file: PendingFile, indexing_time: SystemTime) {
        // If the pending file already exists, replace it with the new one,
        // but keep the old indexing time.
        if let Some(old_file) = self
            .pending_files
            .remove(&pending_file.relative_path.clone())
        {
            self.pending_files.insert(
                pending_file.relative_path.clone(),
                (pending_file, old_file.1),
            );
        } else {
            self.pending_files.insert(
                pending_file.relative_path.clone(),
                (pending_file, indexing_time),
            );
        };
    }

    fn get_outstanding_files(&mut self) -> Vec<PendingFile> {
        let mut outstanding_files = vec![];
        let mut remove_keys = vec![];
        for key in self.pending_files.keys().into_iter() {
            if let Some(pending_details) = self.pending_files.get(key) {
                let (pending_file, index_time) = pending_details;
                if index_time <= &SystemTime::now() {
                    outstanding_files.push(pending_file.clone());
                    remove_keys.push(key.clone());
                }
            }
        }

        for key in remove_keys.iter() {
            self.pending_files.remove(key);
        }

        return outstanding_files;
    }
}

#[derive(Clone, Debug)]
pub struct PendingFile {
    worktree_db_id: i64,
    relative_path: PathBuf,
    absolute_path: PathBuf,
    language: Arc<Language>,
    modified_time: SystemTime,
}

#[derive(Debug, Clone)]
pub struct SearchResult {
    pub worktree_id: WorktreeId,
    pub name: String,
    pub offset: usize,
    pub file_path: PathBuf,
}

enum DbOperation {
    InsertFile {
        worktree_id: i64,
        indexed_file: ParsedFile,
    },
    Delete {
        worktree_id: i64,
        path: PathBuf,
    },
    FindOrCreateWorktree {
        path: PathBuf,
        sender: oneshot::Sender<Result<i64>>,
    },
    FileMTimes {
        worktree_id: i64,
        sender: oneshot::Sender<Result<HashMap<PathBuf, SystemTime>>>,
    },
}

enum EmbeddingJob {
    Enqueue {
        worktree_id: i64,
        parsed_file: ParsedFile,
        document_spans: Vec<String>,
    },
    Flush,
}

impl VectorStore {
    async fn new(
        fs: Arc<dyn Fs>,
        database_url: PathBuf,
        embedding_provider: Arc<dyn EmbeddingProvider>,
        language_registry: Arc<LanguageRegistry>,
        mut cx: AsyncAppContext,
    ) -> Result<ModelHandle<Self>> {
        let database_url = Arc::new(database_url);

        let db = cx
            .background()
            .spawn({
                let fs = fs.clone();
                let database_url = database_url.clone();
                async move {
                    if let Some(db_directory) = database_url.parent() {
                        fs.create_dir(db_directory).await.log_err();
                    }

                    let db = VectorDatabase::new(database_url.to_string_lossy().to_string())?;
                    anyhow::Ok(db)
                }
            })
            .await?;

        Ok(cx.add_model(|cx| {
            // paths_tx -> embeddings_tx -> db_update_tx

            // db_update_tx/rx: updating the database
            let (db_update_tx, db_update_rx) = channel::unbounded();
            let _db_update_task = cx.background().spawn(async move {
                while let Ok(job) = db_update_rx.recv().await {
                    match job {
                        DbOperation::InsertFile {
                            worktree_id,
                            indexed_file,
                        } => {
                            db.insert_file(worktree_id, indexed_file).log_err();
                        }
                        DbOperation::Delete { worktree_id, path } => {
                            db.delete_file(worktree_id, path).log_err();
                        }
                        DbOperation::FindOrCreateWorktree { path, sender } => {
                            let id = db.find_or_create_worktree(&path);
                            sender.send(id).ok();
                        }
                        DbOperation::FileMTimes {
                            worktree_id: worktree_db_id,
                            sender,
                        } => {
                            let file_mtimes = db.get_file_mtimes(worktree_db_id);
                            sender.send(file_mtimes).ok();
                        }
                    }
                }
            });

            // embed_tx/rx: embed a batch and send it to the database
            let (embed_batch_tx, embed_batch_rx) =
                channel::unbounded::<Vec<(i64, ParsedFile, Vec<String>)>>();
            let _embed_batch_task = cx.background().spawn({
                let db_update_tx = db_update_tx.clone();
                let embedding_provider = embedding_provider.clone();
                async move {
                    while let Ok(mut embeddings_queue) = embed_batch_rx.recv().await {
                        // Construct the batch
                        let mut document_spans = vec![];
                        for (_, _, document_span) in embeddings_queue.iter() {
                            document_spans.extend(document_span.iter().map(|s| s.as_str()));
                        }

                        if let Ok(embeddings) = embedding_provider.embed_batch(document_spans).await
                        {
                            let mut i = 0;
                            let mut j = 0;

                            for embedding in embeddings.iter() {
                                while embeddings_queue[i].1.documents.len() == j {
                                    i += 1;
                                    j = 0;
                                }

                                embeddings_queue[i].1.documents[j].embedding = embedding.to_owned();
                                j += 1;
                            }

                            for (worktree_id, indexed_file, _) in embeddings_queue.into_iter() {
                                for document in indexed_file.documents.iter() {
                                    // TODO: Update this so it doesn't panic
                                    assert!(
                                        document.embedding.len() > 0,
                                        "Document Embedding Not Complete"
                                    );
                                }

                                db_update_tx
                                    .send(DbOperation::InsertFile {
                                        worktree_id,
                                        indexed_file,
                                    })
                                    .await
                                    .unwrap();
                            }
                        }
                    }
                }
            });

            // batch_tx/rx: batch files to send for embeddings
            let (batch_files_tx, batch_files_rx) = channel::unbounded::<EmbeddingJob>();
            let _batch_files_task = cx.background().spawn(async move {
                let mut queue_len = 0;
                let mut embeddings_queue = vec![];

                while let Ok(job) = batch_files_rx.recv().await {
                    let should_flush = match job {
                        EmbeddingJob::Enqueue {
                            document_spans,
                            worktree_id,
                            parsed_file,
                        } => {
                            queue_len += &document_spans.len();
                            embeddings_queue.push((worktree_id, parsed_file, document_spans));
                            queue_len >= EMBEDDINGS_BATCH_SIZE
                        }
                        EmbeddingJob::Flush => true,
                    };

                    if should_flush {
                        embed_batch_tx.try_send(embeddings_queue).unwrap();
                        embeddings_queue = vec![];
                        queue_len = 0;
                    }
                }
            });

            // parsing_files_tx/rx: parse files into embeddable documents
            let (parsing_files_tx, parsing_files_rx) = channel::unbounded::<PendingFile>();

            let mut _parsing_files_tasks = Vec::new();
            // for _ in 0..cx.background().num_cpus() {
            for _ in 0..1 {
                let fs = fs.clone();
                let parsing_files_rx = parsing_files_rx.clone();
                let batch_files_tx = batch_files_tx.clone();
                _parsing_files_tasks.push(cx.background().spawn(async move {
                    let parser = Parser::new();
                    let cursor = QueryCursor::new();
                    let mut retriever = CodeContextRetriever { parser, cursor, fs };
                    while let Ok(pending_file) = parsing_files_rx.recv().await {
                        if let Some((indexed_file, document_spans)) =
                            retriever.parse_file(pending_file.clone()).await.log_err()
                        {
                            batch_files_tx
                                .try_send(EmbeddingJob::Enqueue {
                                    worktree_id: pending_file.worktree_db_id,
                                    parsed_file: indexed_file,
                                    document_spans,
                                })
                                .unwrap();
                        }

                        if parsing_files_rx.len() == 0 {
                            batch_files_tx.try_send(EmbeddingJob::Flush).unwrap();
                        }
                    }
                }));
            }

            Self {
                fs,
                database_url,
                embedding_provider,
                language_registry,
                db_update_tx,
                parsing_files_tx,
                _db_update_task,
                _embed_batch_task,
                _batch_files_task,
                _parsing_files_tasks,
                projects: HashMap::new(),
            }
        }))
    }

    fn find_or_create_worktree(&self, path: PathBuf) -> impl Future<Output = Result<i64>> {
        let (tx, rx) = oneshot::channel();
        self.db_update_tx
            .try_send(DbOperation::FindOrCreateWorktree { path, sender: tx })
            .unwrap();
        async move { rx.await? }
    }

    fn get_file_mtimes(
        &self,
        worktree_id: i64,
    ) -> impl Future<Output = Result<HashMap<PathBuf, SystemTime>>> {
        let (tx, rx) = oneshot::channel();
        self.db_update_tx
            .try_send(DbOperation::FileMTimes {
                worktree_id,
                sender: tx,
            })
            .unwrap();
        async move { rx.await? }
    }

    fn add_project(
        &mut self,
        project: ModelHandle<Project>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<()>> {
        let worktree_scans_complete = project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let scan_complete = worktree.read(cx).as_local().unwrap().scan_complete();
                async move {
                    scan_complete.await;
                }
            })
            .collect::<Vec<_>>();
        let worktree_db_ids = project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                self.find_or_create_worktree(worktree.read(cx).abs_path().to_path_buf())
            })
            .collect::<Vec<_>>();

        let fs = self.fs.clone();
        let language_registry = self.language_registry.clone();
        let database_url = self.database_url.clone();
        let db_update_tx = self.db_update_tx.clone();
        let parsing_files_tx = self.parsing_files_tx.clone();

        cx.spawn(|this, mut cx| async move {
            futures::future::join_all(worktree_scans_complete).await;

            let worktree_db_ids = futures::future::join_all(worktree_db_ids).await;

            if let Some(db_directory) = database_url.parent() {
                fs.create_dir(db_directory).await.log_err();
            }

            let worktrees = project.read_with(&cx, |project, cx| {
                project
                    .worktrees(cx)
                    .map(|worktree| worktree.read(cx).snapshot())
                    .collect::<Vec<_>>()
            });

            let mut worktree_file_times = HashMap::new();
            let mut db_ids_by_worktree_id = HashMap::new();
            for (worktree, db_id) in worktrees.iter().zip(worktree_db_ids) {
                let db_id = db_id?;
                db_ids_by_worktree_id.insert(worktree.id(), db_id);
                worktree_file_times.insert(
                    worktree.id(),
                    this.read_with(&cx, |this, _| this.get_file_mtimes(db_id))
                        .await?,
                );
            }

            cx.background()
                .spawn({
                    let db_ids_by_worktree_id = db_ids_by_worktree_id.clone();
                    let db_update_tx = db_update_tx.clone();
                    let language_registry = language_registry.clone();
                    let parsing_files_tx = parsing_files_tx.clone();
                    async move {
                        let t0 = Instant::now();
                        for worktree in worktrees.into_iter() {
                            let mut file_mtimes =
                                worktree_file_times.remove(&worktree.id()).unwrap();
                            for file in worktree.files(false, 0) {
                                let absolute_path = worktree.absolutize(&file.path);

                                if let Ok(language) = language_registry
                                    .language_for_file(&absolute_path, None)
                                    .await
                                {
                                    if language
                                        .grammar()
                                        .and_then(|grammar| grammar.embedding_config.as_ref())
                                        .is_none()
                                    {
                                        continue;
                                    }

                                    let path_buf = file.path.to_path_buf();
                                    let stored_mtime = file_mtimes.remove(&file.path.to_path_buf());
                                    let already_stored = stored_mtime
                                        .map_or(false, |existing_mtime| {
                                            existing_mtime == file.mtime
                                        });

                                    if !already_stored {
                                        parsing_files_tx
                                            .try_send(PendingFile {
                                                worktree_db_id: db_ids_by_worktree_id
                                                    [&worktree.id()],
                                                relative_path: path_buf,
                                                absolute_path,
                                                language,
                                                modified_time: file.mtime,
                                            })
                                            .unwrap();
                                    }
                                }
                            }
                            for file in file_mtimes.keys() {
                                db_update_tx
                                    .try_send(DbOperation::Delete {
                                        worktree_id: db_ids_by_worktree_id[&worktree.id()],
                                        path: file.to_owned(),
                                    })
                                    .unwrap();
                            }
                        }
                        log::info!(
                            "Parsing Worktree Completed in {:?}",
                            t0.elapsed().as_millis()
                        );
                    }
                })
                .detach();

            // let mut pending_files: Vec<(PathBuf, ((i64, PathBuf, Arc<Language>, SystemTime), SystemTime))> = vec![];
            this.update(&mut cx, |this, cx| {
                // The code below manages reindexing on save. Each time a file is
                // saved, this runs; for every changed file whose current time
                // exceeds its previously embedded time by the reindexing delay,
                // we will send the file off to be indexed.
                let _subscription = cx.subscribe(&project, |this, project, event, cx| {
                    if let project::Event::WorktreeUpdatedEntries(worktree_id, changes) = event {
                        this.project_entries_changed(project, changes.clone(), cx, worktree_id);
                    }
                });

                this.projects.insert(
                    project.downgrade(),
                    ProjectState {
                        pending_files: HashMap::new(),
                        worktree_db_ids: db_ids_by_worktree_id.into_iter().collect(),
                        _subscription,
                    },
                );
            });

            anyhow::Ok(())
        })
    }

    pub fn search(
        &mut self,
        project: ModelHandle<Project>,
        phrase: String,
        limit: usize,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<SearchResult>>> {
        let project_state = if let Some(state) = self.projects.get(&project.downgrade()) {
            state
        } else {
            return Task::ready(Err(anyhow!("project not added")));
        };

        let worktree_db_ids = project
            .read(cx)
            .worktrees(cx)
            .filter_map(|worktree| {
                let worktree_id = worktree.read(cx).id();
                project_state.db_id_for_worktree_id(worktree_id)
            })
            .collect::<Vec<_>>();

        let embedding_provider = self.embedding_provider.clone();
        let database_url = self.database_url.clone();
        cx.spawn(|this, cx| async move {
            let documents = cx
                .background()
                .spawn(async move {
                    let database = VectorDatabase::new(database_url.to_string_lossy().into())?;

                    let phrase_embedding = embedding_provider
                        .embed_batch(vec![&phrase])
                        .await?
                        .into_iter()
                        .next()
                        .unwrap();

                    database.top_k_search(&worktree_db_ids, &phrase_embedding, limit)
                })
                .await?;

            this.read_with(&cx, |this, _| {
                let project_state = if let Some(state) = this.projects.get(&project.downgrade()) {
                    state
                } else {
                    return Err(anyhow!("project not added"));
                };

                Ok(documents
                    .into_iter()
                    .filter_map(|(worktree_db_id, file_path, offset, name)| {
                        let worktree_id = project_state.worktree_id_for_db_id(worktree_db_id)?;
                        Some(SearchResult {
                            worktree_id,
                            name,
                            offset,
                            file_path,
                        })
                    })
                    .collect())
            })
        })
    }

    fn project_entries_changed(
        &mut self,
        project: ModelHandle<Project>,
        changes: Arc<[(Arc<Path>, ProjectEntryId, PathChange)]>,
        cx: &mut ModelContext<'_, VectorStore>,
        worktree_id: &WorktreeId,
    ) -> Option<()> {
        let reindexing_delay = settings::get::<VectorStoreSettings>(cx).reindexing_delay_seconds;

        let worktree = project
            .read(cx)
            .worktree_for_id(worktree_id.clone(), cx)?
            .read(cx)
            .snapshot();

        let worktree_db_id = self
            .projects
            .get(&project.downgrade())?
            .db_id_for_worktree_id(worktree.id())?;
        let file_mtimes = self.get_file_mtimes(worktree_db_id);

        let language_registry = self.language_registry.clone();

        cx.spawn(|this, mut cx| async move {
            let file_mtimes = file_mtimes.await.log_err()?;

            for change in changes.into_iter() {
                let change_path = change.0.clone();
                let absolute_path = worktree.absolutize(&change_path);

                // Skip if git ignored or symlink
                if let Some(entry) = worktree.entry_for_id(change.1) {
                    if entry.is_ignored || entry.is_symlink || entry.is_external {
                        continue;
                    }
                }

                match change.2 {
                    PathChange::Removed => this.update(&mut cx, |this, _| {
                        this.db_update_tx
                            .try_send(DbOperation::Delete {
                                worktree_id: worktree_db_id,
                                path: absolute_path,
                            })
                            .unwrap();
                    }),
                    _ => {
                        if let Ok(language) = language_registry
                            .language_for_file(&change_path.to_path_buf(), None)
                            .await
                        {
                            if language
                                .grammar()
                                .and_then(|grammar| grammar.embedding_config.as_ref())
                                .is_none()
                            {
                                continue;
                            }

                            let modified_time =
                                change_path.metadata().log_err()?.modified().log_err()?;

                            let existing_time = file_mtimes.get(&change_path.to_path_buf());
                            let already_stored = existing_time
                                .map_or(false, |existing_time| &modified_time != existing_time);

                            if !already_stored {
                                this.update(&mut cx, |this, _| {
                                    let reindex_time = modified_time
                                        + Duration::from_secs(reindexing_delay as u64);

                                    let project_state =
                                        this.projects.get_mut(&project.downgrade())?;
                                    project_state.update_pending_files(
                                        PendingFile {
                                            relative_path: change_path.to_path_buf(),
                                            absolute_path,
                                            modified_time,
                                            worktree_db_id,
                                            language: language.clone(),
                                        },
                                        reindex_time,
                                    );

                                    for file in project_state.get_outstanding_files() {
                                        this.parsing_files_tx.try_send(file).unwrap();
                                    }
                                    Some(())
                                });
                            }
                        }
                    }
                }
            }

            Some(())
        })
        .detach();

        Some(())
    }
}

impl Entity for VectorStore {
    type Event = ();
}
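Editor's note: the channel layout above (`parsing_files_tx -> batch_files_tx -> embed_batch_tx -> db_update_tx`) is easier to see in miniature. A sketch using only `smol`, with strings standing in for parsed files and database rows (names here are illustrative, not from the crate):

```rust
// Sketch: a two-stage channel pipeline shaped like the one above, where each
// stage is a spawned task that drains its receiver until the senders close.
use smol::channel;

fn main() {
    smol::block_on(async {
        let (parse_tx, parse_rx) = channel::unbounded::<String>();
        let (db_tx, db_rx) = channel::unbounded::<String>();

        // Stage 1: "parse" incoming paths and forward the results.
        let stage1 = smol::spawn(async move {
            while let Ok(path) = parse_rx.recv().await {
                db_tx.send(format!("parsed {}", path)).await.ok();
            }
        });

        // Stage 2: consume results, as the db task does with DbOperation.
        let stage2 = smol::spawn(async move {
            while let Ok(row) = db_rx.recv().await {
                println!("{}", row);
            }
        });

        parse_tx.send("src/main.rs".into()).await.unwrap();
        drop(parse_tx); // close the pipeline so both tasks can finish
        stage1.await;
        stage2.await;
    });
}
```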

30
crates/vector_store/src/vector_store_settings.rs
Normal file

@ -0,0 +1,30 @@
use anyhow;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::Setting;

#[derive(Deserialize, Debug)]
pub struct VectorStoreSettings {
    pub enabled: bool,
    pub reindexing_delay_seconds: usize,
}

#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
pub struct VectorStoreSettingsContent {
    pub enabled: Option<bool>,
    pub reindexing_delay_seconds: Option<usize>,
}

impl Setting for VectorStoreSettings {
    const KEY: Option<&'static str> = Some("vector_store");

    type FileContent = VectorStoreSettingsContent;

    fn load(
        default_value: &Self::FileContent,
        user_values: &[&Self::FileContent],
        _: &gpui::AppContext,
    ) -> anyhow::Result<Self> {
        Self::load_via_json_merge(default_value, user_values)
    }
}
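Editor's note: given the `vector_store` key and the optional fields above, a user-level override in Zed's `settings.json` would presumably look like the following (the delay value is an arbitrary illustration, not a shipped default):

```json
{
  "vector_store": {
    "enabled": true,
    "reindexing_delay_seconds": 600
  }
}
```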

161
crates/vector_store/src/vector_store_tests.rs
Normal file

@ -0,0 +1,161 @@
use crate::{
    db::dot, embedding::EmbeddingProvider, vector_store_settings::VectorStoreSettings, VectorStore,
};
use anyhow::Result;
use async_trait::async_trait;
use gpui::{Task, TestAppContext};
use language::{Language, LanguageConfig, LanguageRegistry};
use project::{project_settings::ProjectSettings, FakeFs, Project};
use rand::{rngs::StdRng, Rng};
use serde_json::json;
use settings::SettingsStore;
use std::sync::Arc;
use unindent::Unindent;

#[gpui::test]
async fn test_vector_store(cx: &mut TestAppContext) {
    cx.update(|cx| {
        cx.set_global(SettingsStore::test(cx));
        settings::register::<VectorStoreSettings>(cx);
        settings::register::<ProjectSettings>(cx);
    });

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/the-root",
        json!({
            "src": {
                "file1.rs": "
                    fn aaa() {
                        println!(\"aaaa!\");
                    }

                    fn zzzzzzzzz() {
                        println!(\"SLEEPING\");
                    }
                ".unindent(),
                "file2.rs": "
                    fn bbb() {
                        println!(\"bbbb!\");
                    }
                ".unindent(),
            }
        }),
    )
    .await;

    let languages = Arc::new(LanguageRegistry::new(Task::ready(())));
    let rust_language = Arc::new(
        Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".into()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        )
        .with_embedding_query(
            r#"
            (function_item
                name: (identifier) @name
                body: (block)) @item
            "#,
        )
        .unwrap(),
    );
    languages.add(rust_language);

    let db_dir = tempdir::TempDir::new("vector-store").unwrap();
    let db_path = db_dir.path().join("db.sqlite");

    let store = VectorStore::new(
        fs.clone(),
        db_path,
        Arc::new(FakeEmbeddingProvider),
        languages,
        cx.to_async(),
    )
    .await
    .unwrap();

    let project = Project::test(fs, ["/the-root".as_ref()], cx).await;
    let worktree_id = project.read_with(cx, |project, cx| {
        project.worktrees(cx).next().unwrap().read(cx).id()
    });
    store
        .update(cx, |store, cx| store.add_project(project.clone(), cx))
        .await
        .unwrap();
    cx.foreground().run_until_parked();

    let search_results = store
        .update(cx, |store, cx| {
            store.search(project.clone(), "aaaa".to_string(), 5, cx)
        })
        .await
        .unwrap();

    assert_eq!(search_results[0].offset, 0);
    assert_eq!(search_results[0].name, "aaa");
    assert_eq!(search_results[0].worktree_id, worktree_id);
}

#[gpui::test]
fn test_dot_product(mut rng: StdRng) {
    assert_eq!(dot(&[1., 0., 0., 0., 0.], &[0., 1., 0., 0., 0.]), 0.);
    assert_eq!(dot(&[2., 0., 0., 0., 0.], &[3., 1., 0., 0., 0.]), 6.);

    for _ in 0..100 {
        let size = 1536;
        let mut a = vec![0.; size];
        let mut b = vec![0.; size];
        for (a, b) in a.iter_mut().zip(b.iter_mut()) {
            *a = rng.gen();
            *b = rng.gen();
        }

        assert_eq!(
            round_to_decimals(dot(&a, &b), 1),
            round_to_decimals(reference_dot(&a, &b), 1)
        );
    }

    fn round_to_decimals(n: f32, decimal_places: i32) -> f32 {
        let factor = (10.0 as f32).powi(decimal_places);
        (n * factor).round() / factor
    }

    fn reference_dot(a: &[f32], b: &[f32]) -> f32 {
        a.iter().zip(b.iter()).map(|(a, b)| a * b).sum()
    }
}

struct FakeEmbeddingProvider;

#[async_trait]
impl EmbeddingProvider for FakeEmbeddingProvider {
    async fn embed_batch(&self, spans: Vec<&str>) -> Result<Vec<Vec<f32>>> {
        Ok(spans
            .iter()
            .map(|span| {
                let mut result = vec![1.0; 26];
                for letter in span.chars() {
                    let letter = letter.to_ascii_lowercase();
                    if letter as u32 >= 'a' as u32 {
                        let ix = (letter as u32) - ('a' as u32);
                        if ix < 26 {
                            result[ix as usize] += 1.0;
                        }
                    }
                }

                let norm = result.iter().map(|x| x * x).sum::<f32>().sqrt();
                for x in &mut result {
                    *x /= norm;
                }

                result
            })
            .collect())
    }
}
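Editor's note: `FakeEmbeddingProvider` maps a span to a normalized letter-frequency vector, so a dot product between two such vectors reduces to letter overlap; that is why the `"aaaa"` query in the test above ranks `fn aaa` first. Extracted as a standalone sketch:

```rust
// Sketch: the letter-frequency embedding from the test, as a free function,
// plus a dot product demonstrating the ranking the test relies on.
fn fake_embedding(span: &str) -> Vec<f32> {
    let mut result = vec![1.0f32; 26];
    for letter in span.chars() {
        let letter = letter.to_ascii_lowercase();
        if letter as u32 >= 'a' as u32 {
            let ix = (letter as u32) - ('a' as u32);
            if ix < 26 {
                result[ix as usize] += 1.0;
            }
        }
    }
    let norm = result.iter().map(|x| x * x).sum::<f32>().sqrt();
    for x in &mut result {
        *x /= norm;
    }
    result
}

fn main() {
    let query = fake_embedding("aaaa");
    let a = fake_embedding("fn aaa() { println!(\"aaaa!\"); }");
    let b = fake_embedding("fn bbb() { println!(\"bbbb!\"); }");
    let dot = |x: &[f32], y: &[f32]| x.iter().zip(y).map(|(x, y)| x * y).sum::<f32>();
    assert!(dot(&query, &a) > dot(&query, &b));
}
```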

@ -120,7 +120,7 @@ impl PickerDelegate for BaseKeymapSelectorDelegate {
        })
    }

    fn confirm(&mut self, cx: &mut ViewContext<BaseKeymapSelector>) {
    fn confirm(&mut self, _: bool, cx: &mut ViewContext<BaseKeymapSelector>) {
        if let Some(selection) = self.matches.get(self.selected_index) {
            let base_keymap = BaseKeymap::from_names(&selection.string);
            update_settings_file::<BaseKeymap>(self.fs.clone(), cx, move |setting| {

@ -27,7 +27,7 @@ use std::{
};
use theme::Theme;

#[derive(Eq, PartialEq, Hash)]
#[derive(Eq, PartialEq, Hash, Debug)]
pub enum ItemEvent {
    CloseItem,
    UpdateTab,

@ -2316,6 +2316,7 @@ mod tests {
            cx.set_global(SettingsStore::test(cx));
            theme::init((), cx);
            crate::init_settings(cx);
            Project::init_settings(cx);
        });
    }

@ -1,6 +1,8 @@
use std::sync::Arc;
use std::{cell::RefCell, rc::Rc, sync::Arc};

use crate::{AppState, FollowerStatesByLeader, Pane, Workspace, WorkspaceSettings};
use crate::{
    pane_group::element::PaneAxisElement, AppState, FollowerStatesByLeader, Pane, Workspace,
};
use anyhow::{anyhow, Result};
use call::{ActiveCall, ParticipantLocation};
use gpui::{

@ -13,7 +15,11 @@ use project::Project;
use serde::Deserialize;
use theme::Theme;

#[derive(Clone, Debug, Eq, PartialEq)]
const HANDLE_HITBOX_SIZE: f32 = 4.0;
const HORIZONTAL_MIN_SIZE: f32 = 80.;
const VERTICAL_MIN_SIZE: f32 = 100.;

#[derive(Clone, Debug, PartialEq)]
pub struct PaneGroup {
    pub(crate) root: Member,
}

@ -77,6 +83,7 @@ impl PaneGroup {
    ) -> AnyElement<Workspace> {
        self.root.render(
            project,
            0,
            theme,
            follower_states,
            active_call,

@ -94,7 +101,7 @@ impl PaneGroup {
    }
}

#[derive(Clone, Debug, Eq, PartialEq)]
#[derive(Clone, Debug, PartialEq)]
pub(crate) enum Member {
    Axis(PaneAxis),
    Pane(ViewHandle<Pane>),

@ -119,7 +126,7 @@ impl Member {
            Down | Right => vec![Member::Pane(old_pane), Member::Pane(new_pane)],
        };

        Member::Axis(PaneAxis { axis, members })
        Member::Axis(PaneAxis::new(axis, members))
    }

    fn contains(&self, needle: &ViewHandle<Pane>) -> bool {

@ -132,6 +139,7 @@ impl Member {
    pub fn render(
        &self,
        project: &ModelHandle<Project>,
        basis: usize,
        theme: &Theme,
        follower_states: &FollowerStatesByLeader,
        active_call: Option<&ModelHandle<ActiveCall>>,

@ -272,6 +280,7 @@ impl Member {
            }
            Member::Axis(axis) => axis.render(
                project,
                basis + 1,
                theme,
                follower_states,
                active_call,

@ -295,13 +304,35 @@ impl Member {
    }
}

#[derive(Clone, Debug, Eq, PartialEq)]
#[derive(Clone, Debug, PartialEq)]
pub(crate) struct PaneAxis {
    pub axis: Axis,
    pub members: Vec<Member>,
    pub flexes: Rc<RefCell<Vec<f32>>>,
}

impl PaneAxis {
    pub fn new(axis: Axis, members: Vec<Member>) -> Self {
        let flexes = Rc::new(RefCell::new(vec![1.; members.len()]));
        Self {
            axis,
            members,
            flexes,
        }
    }

    pub fn load(axis: Axis, members: Vec<Member>, flexes: Option<Vec<f32>>) -> Self {
        let flexes = flexes.unwrap_or_else(|| vec![1.; members.len()]);
        debug_assert!(members.len() == flexes.len());

        let flexes = Rc::new(RefCell::new(flexes));
        Self {
            axis,
            members,
            flexes,
        }
    }

    fn split(
        &mut self,
        old_pane: &ViewHandle<Pane>,

@ -323,6 +354,7 @@ impl PaneAxis {
                    }

                    self.members.insert(idx, Member::Pane(new_pane.clone()));
                    *self.flexes.borrow_mut() = vec![1.; self.members.len()];
                } else {
                    *member =
                        Member::new_axis(old_pane.clone(), new_pane.clone(), direction);

@ -362,10 +394,13 @@ impl PaneAxis {
        if found_pane {
            if let Some(idx) = remove_member {
                self.members.remove(idx);
                *self.flexes.borrow_mut() = vec![1.; self.members.len()];
            }

            if self.members.len() == 1 {
                Ok(self.members.pop())
                let result = self.members.pop();
                *self.flexes.borrow_mut() = vec![1.; self.members.len()];
                Ok(result)
            } else {
                Ok(None)
            }

@ -377,6 +412,7 @@ impl PaneAxis {
    fn render(
        &self,
        project: &ModelHandle<Project>,
        basis: usize,
        theme: &Theme,
        follower_state: &FollowerStatesByLeader,
        active_call: Option<&ModelHandle<ActiveCall>>,

@ -385,40 +421,50 @@ impl PaneAxis {
        app_state: &Arc<AppState>,
        cx: &mut ViewContext<Workspace>,
    ) -> AnyElement<Workspace> {
        let last_member_ix = self.members.len() - 1;
        Flex::new(self.axis)
            .with_children(self.members.iter().enumerate().map(|(ix, member)| {
                let mut flex = 1.0;
                if member.contains(active_pane) {
                    flex = settings::get::<WorkspaceSettings>(cx).active_pane_magnification;
        debug_assert!(self.members.len() == self.flexes.borrow().len());

        let mut pane_axis = PaneAxisElement::new(self.axis, basis, self.flexes.clone());
        let mut active_pane_ix = None;

        let mut members = self.members.iter().enumerate().peekable();
        while let Some((ix, member)) = members.next() {
            let last = members.peek().is_none();

            if member.contains(active_pane) {
                active_pane_ix = Some(ix);
            }

            let mut member = member.render(
                project,
                (basis + ix) * 10,
                theme,
                follower_state,
                active_call,
                active_pane,
                zoomed,
                app_state,
                cx,
            );

            if !last {
                let mut border = theme.workspace.pane_divider;
                border.left = false;
                border.right = false;
                border.top = false;
                border.bottom = false;

                match self.axis {
                    Axis::Vertical => border.bottom = true,
                    Axis::Horizontal => border.right = true,
                }

                let mut member = member.render(
                    project,
                    theme,
                    follower_state,
                    active_call,
                    active_pane,
                    zoomed,
                    app_state,
                    cx,
                );
                if ix < last_member_ix {
                    let mut border = theme.workspace.pane_divider;
                    border.left = false;
                    border.right = false;
                    border.top = false;
                    border.bottom = false;
                    match self.axis {
                        Axis::Vertical => border.bottom = true,
                        Axis::Horizontal => border.right = true,
                    }
                    member = member.contained().with_border(border).into_any();
                }
                member = member.contained().with_border(border).into_any();
            }

                FlexItem::new(member).flex(flex, true)
            }))
            .into_any()
            pane_axis = pane_axis.with_child(member.into_any());
        }
        pane_axis.set_active_pane(active_pane_ix);
        pane_axis.into_any()
    }
}
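Editor's note: the resizing model introduced here stores one flex factor per member, and a pane's extent is `flex / total_flex * axis_length`. The drag handler in the element module below converts a pixel delta on the divider into a flex delta, adding it to one pane and subtracting it from its neighbor so the total stays constant. A sketch of just that arithmetic:

```rust
// Sketch: move flex between two adjacent panes in response to a divider drag.
fn apply_drag(flexes: &mut [f32], ix: usize, pixel_change: f32, axis_len: f32) {
    let flex_change = pixel_change / axis_len;
    flexes[ix] += flex_change;
    flexes[ix + 1] -= flex_change;
}

fn main() {
    // Two panes sharing a 1000px axis, 500px each (flex 1.0 apiece).
    let mut flexes = vec![1.0, 1.0];
    apply_drag(&mut flexes, 0, 100.0, 1000.0); // drag the divider 100px
    assert!((flexes[0] - 1.1).abs() < 1e-6);
    assert!((flexes[1] - 0.9).abs() < 1e-6);
    // Width = flex / total_flex * axis_len, so the panes are now 550px/450px.
}
```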
@ -474,3 +520,336 @@ impl SplitDirection {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
mod element {
|
||||
use std::{cell::RefCell, ops::Range, rc::Rc};
|
||||
|
||||
use gpui::{
|
||||
geometry::{
|
||||
rect::RectF,
|
||||
vector::{vec2f, Vector2F},
|
||||
},
|
||||
json::{self, ToJson},
|
||||
platform::{CursorStyle, MouseButton},
|
||||
AnyElement, Axis, CursorRegion, Element, LayoutContext, MouseRegion, RectFExt,
|
||||
SceneBuilder, SizeConstraint, Vector2FExt, ViewContext,
|
||||
};
|
||||
|
||||
use crate::{
|
||||
pane_group::{HANDLE_HITBOX_SIZE, HORIZONTAL_MIN_SIZE, VERTICAL_MIN_SIZE},
|
||||
Workspace, WorkspaceSettings,
|
||||
};
|
||||
|
||||
pub struct PaneAxisElement {
|
||||
axis: Axis,
|
||||
basis: usize,
|
||||
active_pane_ix: Option<usize>,
|
||||
flexes: Rc<RefCell<Vec<f32>>>,
|
||||
children: Vec<AnyElement<Workspace>>,
|
||||
}
|
||||
|
||||
impl PaneAxisElement {
|
||||
pub fn new(axis: Axis, basis: usize, flexes: Rc<RefCell<Vec<f32>>>) -> Self {
|
||||
Self {
|
||||
axis,
|
||||
basis,
|
||||
flexes,
|
||||
active_pane_ix: None,
|
||||
children: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_active_pane(&mut self, active_pane_ix: Option<usize>) {
|
||||
self.active_pane_ix = active_pane_ix;
|
||||
}
|
||||
|
||||
fn layout_children(
|
||||
&mut self,
|
||||
active_pane_magnification: f32,
|
||||
constraint: SizeConstraint,
|
||||
remaining_space: &mut f32,
|
||||
remaining_flex: &mut f32,
|
||||
cross_axis_max: &mut f32,
|
||||
view: &mut Workspace,
|
||||
cx: &mut LayoutContext<Workspace>,
|
||||
) {
|
||||
let flexes = self.flexes.borrow();
|
||||
let cross_axis = self.axis.invert();
|
||||
for (ix, child) in self.children.iter_mut().enumerate() {
|
||||
let flex = if active_pane_magnification != 1. {
|
||||
if let Some(active_pane_ix) = self.active_pane_ix {
|
||||
if ix == active_pane_ix {
|
||||
active_pane_magnification
|
||||
} else {
|
||||
1.
|
||||
}
|
||||
} else {
|
||||
1.
|
||||
}
|
||||
} else {
|
||||
flexes[ix]
|
||||
};
|
||||
|
||||
let child_size = if *remaining_flex == 0.0 {
|
||||
*remaining_space
|
||||
} else {
|
||||
let space_per_flex = *remaining_space / *remaining_flex;
|
||||
space_per_flex * flex
|
||||
};
|
||||
|
||||
let child_constraint = match self.axis {
|
||||
Axis::Horizontal => SizeConstraint::new(
|
||||
vec2f(child_size, constraint.min.y()),
|
||||
vec2f(child_size, constraint.max.y()),
|
||||
),
|
||||
Axis::Vertical => SizeConstraint::new(
|
||||
vec2f(constraint.min.x(), child_size),
|
||||
vec2f(constraint.max.x(), child_size),
|
||||
),
|
||||
};
|
||||
let child_size = child.layout(child_constraint, view, cx);
|
||||
*remaining_space -= child_size.along(self.axis);
|
||||
*remaining_flex -= flex;
|
||||
*cross_axis_max = cross_axis_max.max(child_size.along(cross_axis));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Extend<AnyElement<Workspace>> for PaneAxisElement {
|
||||
fn extend<T: IntoIterator<Item = AnyElement<Workspace>>>(&mut self, children: T) {
|
||||
self.children.extend(children);
|
||||
}
|
||||
}
|
||||
|
||||
impl Element<Workspace> for PaneAxisElement {
|
||||
type LayoutState = f32;
|
||||
type PaintState = ();
|
||||
|
||||
fn layout(
|
||||
&mut self,
|
||||
constraint: SizeConstraint,
|
||||
view: &mut Workspace,
|
||||
cx: &mut LayoutContext<Workspace>,
|
||||
) -> (Vector2F, Self::LayoutState) {
|
||||
debug_assert!(self.children.len() == self.flexes.borrow().len());
|
||||
|
||||
let active_pane_magnification =
|
||||
settings::get::<WorkspaceSettings>(cx).active_pane_magnification;
|
||||
|
||||
let mut remaining_flex = 0.;
if active_pane_magnification != 1. {
let active_pane_flex = self
.active_pane_ix
.map(|_| active_pane_magnification)
.unwrap_or(1.);
remaining_flex += self.children.len() as f32 - 1. + active_pane_flex;
} else {
for flex in self.flexes.borrow().iter() {
remaining_flex += flex;
}
}

let mut cross_axis_max: f32 = 0.0;
let mut remaining_space = constraint.max_along(self.axis);

if remaining_space.is_infinite() {
panic!("flex contains flexible children but has an infinite constraint along the flex axis");
}

self.layout_children(
active_pane_magnification,
constraint,
&mut remaining_space,
&mut remaining_flex,
&mut cross_axis_max,
view,
cx,
);

let mut size = match self.axis {
Axis::Horizontal => vec2f(constraint.max.x() - remaining_space, cross_axis_max),
Axis::Vertical => vec2f(cross_axis_max, constraint.max.y() - remaining_space),
};

if constraint.min.x().is_finite() {
size.set_x(size.x().max(constraint.min.x()));
}
if constraint.min.y().is_finite() {
size.set_y(size.y().max(constraint.min.y()));
}

if size.x() > constraint.max.x() {
size.set_x(constraint.max.x());
}
if size.y() > constraint.max.y() {
size.set_y(constraint.max.y());
}

(size, remaining_space)
}

fn paint(
&mut self,
scene: &mut SceneBuilder,
bounds: RectF,
visible_bounds: RectF,
remaining_space: &mut Self::LayoutState,
view: &mut Workspace,
cx: &mut ViewContext<Workspace>,
) -> Self::PaintState {
let can_resize = settings::get::<WorkspaceSettings>(cx).active_pane_magnification == 1.;
let visible_bounds = bounds.intersection(visible_bounds).unwrap_or_default();

let overflowing = *remaining_space < 0.;
if overflowing {
scene.push_layer(Some(visible_bounds));
}

let mut child_origin = bounds.origin();

let mut children_iter = self.children.iter_mut().enumerate().peekable();
while let Some((ix, child)) = children_iter.next() {
let child_start = child_origin.clone();
child.paint(scene, child_origin, visible_bounds, view, cx);

match self.axis {
Axis::Horizontal => child_origin += vec2f(child.size().x(), 0.0),
Axis::Vertical => child_origin += vec2f(0.0, child.size().y()),
}

if let Some(Some((next_ix, next_child))) = can_resize.then(|| children_iter.peek())
{
scene.push_stacking_context(None, None);

let handle_origin = match self.axis {
Axis::Horizontal => child_origin - vec2f(HANDLE_HITBOX_SIZE / 2., 0.0),
Axis::Vertical => child_origin - vec2f(0.0, HANDLE_HITBOX_SIZE / 2.),
};

let handle_bounds = match self.axis {
Axis::Horizontal => RectF::new(
handle_origin,
vec2f(HANDLE_HITBOX_SIZE, visible_bounds.height()),
),
Axis::Vertical => RectF::new(
handle_origin,
vec2f(visible_bounds.width(), HANDLE_HITBOX_SIZE),
),
};

let style = match self.axis {
Axis::Horizontal => CursorStyle::ResizeLeftRight,
Axis::Vertical => CursorStyle::ResizeUpDown,
};

scene.push_cursor_region(CursorRegion {
bounds: handle_bounds,
style,
});

let axis = self.axis;
let child_size = child.size();
let next_child_size = next_child.size();
let drag_bounds = visible_bounds.clone();
let flexes = self.flexes.clone();
let current_flex = flexes.borrow()[ix];
let next_ix = *next_ix;
let next_flex = flexes.borrow()[next_ix];
enum ResizeHandle {}
let mut mouse_region = MouseRegion::new::<ResizeHandle>(
cx.view_id(),
self.basis + ix,
handle_bounds,
);
mouse_region = mouse_region.on_drag(
MouseButton::Left,
move |drag, workspace: &mut Workspace, cx| {
let min_size = match axis {
Axis::Horizontal => HORIZONTAL_MIN_SIZE,
Axis::Vertical => VERTICAL_MIN_SIZE,
};
// Don't allow resizing to less than the minimum size, if elements are already too small
if min_size - 1. > child_size.along(axis)
|| min_size - 1. > next_child_size.along(axis)
{
return;
}

let mut current_target_size = (drag.position - child_start).along(axis);

let proposed_current_pixel_change =
current_target_size - child_size.along(axis);

if proposed_current_pixel_change < 0. {
current_target_size = f32::max(current_target_size, min_size);
} else if proposed_current_pixel_change > 0. {
// TODO: cascade this change to other children if current item is at min size
let next_target_size = f32::max(
next_child_size.along(axis) - proposed_current_pixel_change,
min_size,
);
current_target_size = f32::min(
current_target_size,
child_size.along(axis) + next_child_size.along(axis)
- next_target_size,
);
}

let current_pixel_change = current_target_size - child_size.along(axis);
let flex_change = current_pixel_change / drag_bounds.length_along(axis);
let current_target_flex = current_flex + flex_change;
let next_target_flex = next_flex - flex_change;

let mut borrow = flexes.borrow_mut();
*borrow.get_mut(ix).unwrap() = current_target_flex;
*borrow.get_mut(next_ix).unwrap() = next_target_flex;

workspace.schedule_serialize(cx);
cx.notify();
},
);
scene.push_mouse_region(mouse_region);

scene.pop_stacking_context();
}
}

if overflowing {
scene.pop_layer();
}
}

fn rect_for_text_range(
&self,
range_utf16: Range<usize>,
_: RectF,
_: RectF,
_: &Self::LayoutState,
_: &Self::PaintState,
view: &Workspace,
cx: &ViewContext<Workspace>,
) -> Option<RectF> {
self.children
.iter()
.find_map(|child| child.rect_for_text_range(range_utf16.clone(), view, cx))
}

fn debug(
&self,
bounds: RectF,
_: &Self::LayoutState,
_: &Self::PaintState,
view: &Workspace,
cx: &ViewContext<Workspace>,
) -> json::Value {
serde_json::json!({
"type": "PaneAxis",
"bounds": bounds.to_json(),
"axis": self.axis.to_json(),
"flexes": *self.flexes.borrow(),
"children": self.children.iter().map(|child| child.debug(view, cx)).collect::<Vec<json::Value>>()
})
}
}
}
@ -45,6 +45,7 @@ define_connection! {
// parent_group_id: Option<usize>, // None indicates that this is the root node
// position: Option<usize>, // None indicates that this is the root node
// axis: Option<Axis>, // 'Vertical', 'Horizontal'
// flexes: Option<Vec<f32>>, // A JSON array of floats
// )
//
// panes(
@ -168,7 +169,12 @@ define_connection! {
ALTER TABLE workspaces ADD COLUMN left_dock_zoom INTEGER; //bool
ALTER TABLE workspaces ADD COLUMN right_dock_zoom INTEGER; //bool
ALTER TABLE workspaces ADD COLUMN bottom_dock_zoom INTEGER; //bool
)];
),
// Add pane group flex data
sql!(
ALTER TABLE pane_groups ADD COLUMN flexes TEXT;
)
];
}

impl WorkspaceDb {

@ -359,38 +365,51 @@ impl WorkspaceDb {
group_id: Option<GroupId>,
) -> Result<Vec<SerializedPaneGroup>> {
type GroupKey = (Option<GroupId>, WorkspaceId);
type GroupOrPane = (Option<GroupId>, Option<Axis>, Option<PaneId>, Option<bool>);
type GroupOrPane = (
Option<GroupId>,
Option<Axis>,
Option<PaneId>,
Option<bool>,
Option<String>,
);
self.select_bound::<GroupKey, GroupOrPane>(sql!(
SELECT group_id, axis, pane_id, active
SELECT group_id, axis, pane_id, active, flexes
FROM (SELECT
group_id,
axis,
NULL as pane_id,
NULL as active,
position,
parent_group_id,
workspace_id
FROM pane_groups
group_id,
axis,
NULL as pane_id,
NULL as active,
position,
parent_group_id,
workspace_id,
flexes
FROM pane_groups
UNION
SELECT
NULL,
NULL,
center_panes.pane_id,
panes.active as active,
position,
parent_group_id,
panes.workspace_id as workspace_id
FROM center_panes
JOIN panes ON center_panes.pane_id = panes.pane_id)
SELECT
NULL,
NULL,
center_panes.pane_id,
panes.active as active,
position,
parent_group_id,
panes.workspace_id as workspace_id,
NULL
FROM center_panes
JOIN panes ON center_panes.pane_id = panes.pane_id)
WHERE parent_group_id IS ? AND workspace_id = ?
ORDER BY position
))?((group_id, workspace_id))?
.into_iter()
.map(|(group_id, axis, pane_id, active)| {
.map(|(group_id, axis, pane_id, active, flexes)| {
if let Some((group_id, axis)) = group_id.zip(axis) {
let flexes = flexes
.map(|flexes| serde_json::from_str::<Vec<f32>>(&flexes))
.transpose()?;

Ok(SerializedPaneGroup::Group {
axis,
children: self.get_pane_group(workspace_id, Some(group_id))?,
flexes,
})
} else if let Some((pane_id, active)) = pane_id.zip(active) {
Ok(SerializedPaneGroup::Pane(SerializedPane::new(

@ -417,14 +436,34 @@ impl WorkspaceDb {
parent: Option<(GroupId, usize)>,
) -> Result<()> {
match pane_group {
SerializedPaneGroup::Group { axis, children } => {
SerializedPaneGroup::Group {
axis,
children,
flexes,
} => {
let (parent_id, position) = unzip_option(parent);

let flex_string = flexes
.as_ref()
.map(|flexes| serde_json::json!(flexes).to_string());

let group_id = conn.select_row_bound::<_, i64>(sql!(
INSERT INTO pane_groups(workspace_id, parent_group_id, position, axis)
VALUES (?, ?, ?, ?)
INSERT INTO pane_groups(
workspace_id,
parent_group_id,
position,
axis,
flexes
)
VALUES (?, ?, ?, ?, ?)
RETURNING group_id
))?((workspace_id, parent_id, position, *axis))?
))?((
workspace_id,
parent_id,
position,
*axis,
flex_string,
))?
.ok_or_else(|| anyhow!("Couldn't retrieve group_id from inserted pane_group"))?;

for (position, group) in children.iter().enumerate() {

@ -641,6 +680,14 @@ mod tests {
assert_eq!(test_text_1, "test-text-1");
}

fn group(axis: gpui::Axis, children: Vec<SerializedPaneGroup>) -> SerializedPaneGroup {
SerializedPaneGroup::Group {
axis,
flexes: None,
children,
}
}

#[gpui::test]
async fn test_full_workspace_serialization() {
env_logger::try_init().ok();

@ -652,12 +699,12 @@ mod tests {
// | - - - | |
// | 3,4 | |
// -----------------
let center_group = SerializedPaneGroup::Group {
axis: gpui::Axis::Horizontal,
children: vec![
SerializedPaneGroup::Group {
axis: gpui::Axis::Vertical,
children: vec![
let center_group = group(
gpui::Axis::Horizontal,
vec![
group(
gpui::Axis::Vertical,
vec![
SerializedPaneGroup::Pane(SerializedPane::new(
vec![
SerializedItem::new("Terminal", 5, false),

@ -673,7 +720,7 @@ mod tests {
false,
)),
],
},
),
SerializedPaneGroup::Pane(SerializedPane::new(
vec![
SerializedItem::new("Terminal", 9, false),

@ -682,7 +729,7 @@ mod tests {
false,
)),
],
};
);

let workspace = SerializedWorkspace {
id: 5,

@ -811,12 +858,12 @@ mod tests {
// | - - - | |
// | 3,4 | |
// -----------------
let center_pane = SerializedPaneGroup::Group {
axis: gpui::Axis::Horizontal,
children: vec![
SerializedPaneGroup::Group {
axis: gpui::Axis::Vertical,
children: vec![
let center_pane = group(
gpui::Axis::Horizontal,
vec![
group(
gpui::Axis::Vertical,
vec![
SerializedPaneGroup::Pane(SerializedPane::new(
vec![
SerializedItem::new("Terminal", 1, false),

@ -832,7 +879,7 @@ mod tests {
true,
)),
],
},
),
SerializedPaneGroup::Pane(SerializedPane::new(
vec![
SerializedItem::new("Terminal", 5, true),

@ -841,7 +888,7 @@ mod tests {
false,
)),
],
};
);

let workspace = default_workspace(&["/tmp"], &center_pane);

@ -858,12 +905,12 @@ mod tests {

let db = WorkspaceDb(open_test_db("test_cleanup_panes").await);

let center_pane = SerializedPaneGroup::Group {
axis: gpui::Axis::Horizontal,
children: vec![
SerializedPaneGroup::Group {
axis: gpui::Axis::Vertical,
children: vec![
let center_pane = group(
gpui::Axis::Horizontal,
vec![
group(
gpui::Axis::Vertical,
vec![
SerializedPaneGroup::Pane(SerializedPane::new(
vec![
SerializedItem::new("Terminal", 1, false),

@ -879,7 +926,7 @@ mod tests {
true,
)),
],
},
),
SerializedPaneGroup::Pane(SerializedPane::new(
vec![
SerializedItem::new("Terminal", 5, false),

@ -888,7 +935,7 @@ mod tests {
false,
)),
],
};
);

let id = &["/tmp"];

@ -896,9 +943,9 @@ mod tests {

db.save_workspace(workspace.clone()).await;

workspace.center_group = SerializedPaneGroup::Group {
axis: gpui::Axis::Vertical,
children: vec![
workspace.center_group = group(
gpui::Axis::Vertical,
vec![
SerializedPaneGroup::Pane(SerializedPane::new(
vec![
SerializedItem::new("Terminal", 1, false),

@ -914,7 +961,7 @@ mod tests {
true,
)),
],
};
);

db.save_workspace(workspace.clone()).await;
@ -127,10 +127,11 @@ impl Bind for DockData {
}
}

#[derive(Debug, PartialEq, Eq, Clone)]
#[derive(Debug, PartialEq, Clone)]
pub enum SerializedPaneGroup {
Group {
axis: Axis,
flexes: Option<Vec<f32>>,
children: Vec<SerializedPaneGroup>,
},
Pane(SerializedPane),

@ -149,7 +150,7 @@ impl Default for SerializedPaneGroup {
impl SerializedPaneGroup {
#[async_recursion(?Send)]
pub(crate) async fn deserialize(
&self,
self,
project: &ModelHandle<Project>,
workspace_id: WorkspaceId,
workspace: &WeakViewHandle<Workspace>,

@ -160,7 +161,11 @@ impl SerializedPaneGroup {
Vec<Option<Box<dyn ItemHandle>>>,
)> {
match self {
SerializedPaneGroup::Group { axis, children } => {
SerializedPaneGroup::Group {
axis,
children,
flexes,
} => {
let mut current_active_pane = None;
let mut members = Vec::new();
let mut items = Vec::new();

@ -184,10 +189,7 @@ impl SerializedPaneGroup {
}

Some((
Member::Axis(PaneAxis {
axis: *axis,
members,
}),
Member::Axis(PaneAxis::load(axis, members, flexes)),
current_active_pane,
items,
))
@ -37,7 +37,11 @@ pub trait SearchableItem: Item {
regex: true,
}
}
fn to_search_event(event: &Self::Event) -> Option<SearchEvent>;
fn to_search_event(
&mut self,
event: &Self::Event,
cx: &mut ViewContext<Self>,
) -> Option<SearchEvent>;
fn clear_matches(&mut self, cx: &mut ViewContext<Self>);
fn update_matches(&mut self, matches: Vec<Self::Match>, cx: &mut ViewContext<Self>);
fn query_suggestion(&mut self, cx: &mut ViewContext<Self>) -> String;

@ -47,6 +51,7 @@ pub trait SearchableItem: Item {
matches: Vec<Self::Match>,
cx: &mut ViewContext<Self>,
);
fn select_matches(&mut self, matches: Vec<Self::Match>, cx: &mut ViewContext<Self>);
fn match_index_for_direction(
&mut self,
matches: &Vec<Self::Match>,

@ -102,6 +107,7 @@ pub trait SearchableItemHandle: ItemHandle {
matches: &Vec<Box<dyn Any + Send>>,
cx: &mut WindowContext,
);
fn select_matches(&self, matches: &Vec<Box<dyn Any + Send>>, cx: &mut WindowContext);
fn match_index_for_direction(
&self,
matches: &Vec<Box<dyn Any + Send>>,

@ -139,8 +145,9 @@ impl<T: SearchableItem> SearchableItemHandle for ViewHandle<T> {
cx: &mut WindowContext,
handler: Box<dyn Fn(SearchEvent, &mut WindowContext)>,
) -> Subscription {
cx.subscribe(self, move |_, event, cx| {
if let Some(search_event) = T::to_search_event(event) {
cx.subscribe(self, move |handle, event, cx| {
let search_event = handle.update(cx, |handle, cx| handle.to_search_event(event, cx));
if let Some(search_event) = search_event {
handler(search_event, cx)
}
})

@ -165,6 +172,12 @@ impl<T: SearchableItem> SearchableItemHandle for ViewHandle<T> {
let matches = downcast_matches(matches);
self.update(cx, |this, cx| this.activate_match(index, matches, cx));
}

fn select_matches(&self, matches: &Vec<Box<dyn Any + Send>>, cx: &mut WindowContext) {
let matches = downcast_matches(matches);
self.update(cx, |this, cx| this.select_matches(matches, cx));
}

fn match_index_for_direction(
&self,
matches: &Vec<Box<dyn Any + Send>>,
@ -1,8 +1,4 @@
pub mod dock;
/// NOTE: Focus only 'takes' after an update has flushed_effects.
///
/// This may cause issues when you're trying to write tests that use workspace focus to add items at
/// specific locations.
pub mod item;
pub mod notifications;
pub mod pane;

@ -508,6 +504,7 @@ pub struct Workspace {
subscriptions: Vec<Subscription>,
_apply_leader_updates: Task<Result<()>>,
_observe_current_user: Task<Result<()>>,
_schedule_serialize: Option<Task<()>>,
pane_history_timestamp: Arc<AtomicUsize>,
}

@ -722,6 +719,7 @@ impl Workspace {
app_state,
_observe_current_user,
_apply_leader_updates,
_schedule_serialize: None,
leader_updates_tx,
subscriptions,
pane_history_timestamp,

@ -1823,6 +1821,13 @@ impl Workspace {
.update(cx, |pane, cx| pane.add_item(item, true, true, None, cx));
}

pub fn split_item(&mut self, item: Box<dyn ItemHandle>, cx: &mut ViewContext<Self>) {
let new_pane = self.split_pane(self.active_pane.clone(), SplitDirection::Right, cx);
new_pane.update(cx, move |new_pane, cx| {
new_pane.add_item(item, true, true, None, cx)
})
}

pub fn open_abs_path(
&mut self,
abs_path: PathBuf,

@ -1853,6 +1858,21 @@ impl Workspace {
})
}

pub fn split_abs_path(
&mut self,
abs_path: PathBuf,
visible: bool,
cx: &mut ViewContext<Self>,
) -> Task<anyhow::Result<Box<dyn ItemHandle>>> {
let project_path_task =
Workspace::project_path_for_path(self.project.clone(), &abs_path, visible, cx);
cx.spawn(|this, mut cx| async move {
let (_, path) = project_path_task.await?;
this.update(&mut cx, |this, cx| this.split_path(path, cx))?
.await
})
}

pub fn open_path(
&mut self,
path: impl Into<ProjectPath>,

@ -1878,6 +1898,38 @@ impl Workspace {
})
}

pub fn split_path(
&mut self,
path: impl Into<ProjectPath>,
cx: &mut ViewContext<Self>,
) -> Task<Result<Box<dyn ItemHandle>, anyhow::Error>> {
let pane = self.last_active_center_pane.clone().unwrap_or_else(|| {
self.panes
.first()
.expect("There must be an active pane")
.downgrade()
});

if let Member::Pane(center_pane) = &self.center.root {
if center_pane.read(cx).items_len() == 0 {
return self.open_path(path, Some(pane), true, cx);
}
}

let task = self.load_path(path.into(), cx);
cx.spawn(|this, mut cx| async move {
let (project_entry_id, build_item) = task.await?;
this.update(&mut cx, move |this, cx| -> Option<_> {
let pane = pane.upgrade(cx)?;
let new_pane = this.split_pane(pane, SplitDirection::Right, cx);
new_pane.update(cx, |new_pane, cx| {
Some(new_pane.open_item(project_entry_id, true, cx, build_item))
})
})
.map(|option| option.ok_or_else(|| anyhow!("pane was dropped")))?
})
}

pub(crate) fn load_path(
&mut self,
path: ProjectPath,

@ -1928,6 +1980,30 @@ impl Workspace {
item
}

pub fn split_project_item<T>(
&mut self,
project_item: ModelHandle<T::Item>,
cx: &mut ViewContext<Self>,
) -> ViewHandle<T>
where
T: ProjectItem,
{
use project::Item as _;

let entry_id = project_item.read(cx).entry_id(cx);
if let Some(item) = entry_id
.and_then(|entry_id| self.active_pane().read(cx).item_for_entry(entry_id, cx))
.and_then(|item| item.downcast())
{
self.activate_item(&item, cx);
return item;
}

let item = cx.add_view(|cx| T::for_project_item(self.project().clone(), project_item, cx));
self.split_item(Box::new(item.clone()), cx);
item
}

pub fn open_shared_screen(&mut self, peer_id: PeerId, cx: &mut ViewContext<Self>) {
if let Some(shared_screen) = self.shared_screen_for_peer(peer_id, &self.active_pane, cx) {
self.active_pane.update(cx, |pane, cx| {

@ -1955,7 +2031,7 @@ impl Workspace {
if let Some(pane) = panes.get(action.0).map(|p| (*p).clone()) {
cx.focus(&pane);
} else {
self.split_pane(self.active_pane.clone(), SplitDirection::Right, cx);
self.split_and_clone(self.active_pane.clone(), SplitDirection::Right, cx);
}
}

@ -2008,7 +2084,7 @@ impl Workspace {
match event {
pane::Event::AddItem { item } => item.added_to_pane(self, pane, cx),
pane::Event::Split(direction) => {
self.split_pane(pane, *direction, cx);
self.split_and_clone(pane, *direction, cx);
}
pane::Event::Remove => self.remove_pane(pane, cx),
pane::Event::ActivateItem { local } => {

@ -2059,6 +2135,20 @@ impl Workspace {
}

pub fn split_pane(
&mut self,
pane_to_split: ViewHandle<Pane>,
split_direction: SplitDirection,
cx: &mut ViewContext<Self>,
) -> ViewHandle<Pane> {
let new_pane = self.add_pane(cx);
self.center
.split(&pane_to_split, &new_pane, split_direction)
.unwrap();
cx.notify();
new_pane
}

pub fn split_and_clone(
&mut self,
pane: ViewHandle<Pane>,
direction: SplitDirection,

@ -2897,6 +2987,14 @@ impl Workspace {
cx.notify();
}

fn schedule_serialize(&mut self, cx: &mut ViewContext<Self>) {
self._schedule_serialize = Some(cx.spawn(|this, cx| async move {
cx.background().timer(Duration::from_millis(100)).await;
this.read_with(&cx, |this, cx| this.serialize_workspace(cx))
.ok();
}));
}

fn serialize_workspace(&self, cx: &ViewContext<Self>) {
fn serialize_pane_handle(
pane_handle: &ViewHandle<Pane>,

@ -2927,12 +3025,17 @@ impl Workspace {
cx: &AppContext,
) -> SerializedPaneGroup {
match pane_group {
Member::Axis(PaneAxis { axis, members }) => SerializedPaneGroup::Group {
Member::Axis(PaneAxis {
axis,
members,
flexes,
}) => SerializedPaneGroup::Group {
axis: *axis,
children: members
.iter()
.map(|member| build_serialized_pane_group(member, cx))
.collect::<Vec<_>>(),
flexes: Some(flexes.borrow().clone()),
},
Member::Pane(pane_handle) => {
SerializedPaneGroup::Pane(serialize_pane_handle(&pane_handle, cx))

@ -3399,27 +3502,11 @@ fn notify_if_database_failed(workspace: &WeakViewHandle<Workspace>, cx: &mut Asy
if (*db::ALL_FILE_DB_FAILED).load(std::sync::atomic::Ordering::Acquire) {
workspace.show_notification_once(0, cx, |cx| {
cx.add_view(|_| {
MessageNotification::new("Failed to load any database file.")
MessageNotification::new("Failed to load the database file.")
.with_click_message("Click to let us know about this error")
.on_click(|cx| cx.platform().open_url(REPORT_ISSUE_URL))
})
});
} else {
let backup_path = (*db::BACKUP_DB_PATH).read();
if let Some(backup_path) = backup_path.clone() {
workspace.show_notification_once(1, cx, move |cx| {
cx.add_view(move |_| {
MessageNotification::new(format!(
"Database file was corrupted. Old database backed up to {}",
backup_path.display()
))
.with_click_message("Click to show old database in finder")
.on_click(move |cx| {
cx.platform().open_url(&backup_path.to_string_lossy())
})
})
});
}
}
})
.log_err();

@ -4235,7 +4322,7 @@ mod tests {
});

workspace
.split_pane(left_pane.clone(), SplitDirection::Right, cx)
.split_and_clone(left_pane.clone(), SplitDirection::Right, cx)
.unwrap();

left_pane
@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathansobo@gmail.com>"]
description = "The fast, collaborative code editor."
edition = "2021"
name = "zed"
version = "0.95.0"
version = "0.96.0"
publish = false

[lib]

@ -64,6 +64,7 @@ terminal_view = { path = "../terminal_view" }
theme = { path = "../theme" }
theme_selector = { path = "../theme_selector" }
util = { path = "../util" }
vector_store = { path = "../vector_store" }
vim = { path = "../vim" }
workspace = { path = "../workspace" }
welcome = { path = "../welcome" }

@ -103,26 +104,28 @@ thiserror.workspace = true
tiny_http = "0.8"
toml.workspace = true
tree-sitter.workspace = true
tree-sitter-c = "0.20.1"
tree-sitter-cpp = "0.20.0"
tree-sitter-css = { git = "https://github.com/tree-sitter/tree-sitter-css", rev = "769203d0f9abe1a9a691ac2b9fe4bb4397a73c51" }
tree-sitter-elixir = { git = "https://github.com/elixir-lang/tree-sitter-elixir", rev = "4ba9dab6e2602960d95b2b625f3386c27e08084e" }
tree-sitter-embedded-template = "0.20.0"
tree-sitter-go = { git = "https://github.com/tree-sitter/tree-sitter-go", rev = "aeb2f33b366fd78d5789ff104956ce23508b85db" }
tree-sitter-heex = { git = "https://github.com/phoenixframework/tree-sitter-heex", rev = "2e1348c3cf2c9323e87c2744796cf3f3868aa82a" }
tree-sitter-json = { git = "https://github.com/tree-sitter/tree-sitter-json", rev = "40a81c01a40ac48744e0c8ccabbaba1920441199" }
tree-sitter-rust = "0.20.3"
tree-sitter-markdown = { git = "https://github.com/MDeiml/tree-sitter-markdown", rev = "330ecab87a3e3a7211ac69bbadc19eabecdb1cca" }
tree-sitter-python = "0.20.2"
tree-sitter-php = { git = "https://github.com/tree-sitter/tree-sitter-php", rev = "d38adb26304d9b9d38e9a3b4aae0ec4b29bf9462" }
tree-sitter-toml = { git = "https://github.com/tree-sitter/tree-sitter-toml", rev = "342d9be207c2dba869b9967124c679b5e6fd0ebe" }
tree-sitter-typescript = { git = "https://github.com/tree-sitter/tree-sitter-typescript", rev = "5d20856f34315b068c41edaee2ac8a100081d259" }
tree-sitter-ruby = "0.20.0"
tree-sitter-html = "0.19.0"
tree-sitter-scheme = { git = "https://github.com/6cdh/tree-sitter-scheme", rev = "af0fd1fa452cb2562dc7b5c8a8c55551c39273b9"}
tree-sitter-racket = { git = "https://github.com/zed-industries/tree-sitter-racket", rev = "eb010cf2c674c6fd9a6316a84e28ef90190fe51a"}
tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "f545a41f57502e1b5ddf2a6668896c1b0620f930"}
tree-sitter-lua = "0.0.14"
tree-sitter-c.workspace = true
tree-sitter-cpp.workspace = true
tree-sitter-css.workspace = true
tree-sitter-elixir.workspace = true
tree-sitter-embedded-template.workspace = true
tree-sitter-go.workspace = true
tree-sitter-heex.workspace = true
tree-sitter-json.workspace = true
tree-sitter-rust.workspace = true
tree-sitter-markdown.workspace = true
tree-sitter-python.workspace = true
tree-sitter-toml.workspace = true
tree-sitter-typescript.workspace = true
tree-sitter-ruby.workspace = true
tree-sitter-html.workspace = true
tree-sitter-php.workspace = true
tree-sitter-scheme.workspace = true
tree-sitter-svelte.workspace = true
tree-sitter-racket.workspace = true
tree-sitter-yaml.workspace = true
tree-sitter-lua.workspace = true

url = "2.2"
urlencoding = "2.1.2"
uuid = { version = "1.1.2", features = ["v4"] }

@ -17,6 +17,7 @@ mod php;
mod python;
mod ruby;
mod rust;
mod svelte;
mod typescript;
mod yaml;

@ -138,6 +139,13 @@ pub fn init(languages: Arc<LanguageRegistry>, node_runtime: Arc<NodeRuntime>) {
tree_sitter_yaml::language(),
vec![Arc::new(yaml::YamlLspAdapter::new(node_runtime.clone()))],
);
language(
"svelte",
tree_sitter_svelte::language(),
vec![Arc::new(svelte::SvelteLspAdapter::new(
node_runtime.clone(),
))],
);
language(
"php",
tree_sitter_php::language(),

@ -176,6 +184,7 @@ fn load_queries(name: &str) -> LanguageQueries {
brackets: load_query(name, "/brackets"),
indents: load_query(name, "/indents"),
outline: load_query(name, "/outline"),
embedding: load_query(name, "/embedding"),
injections: load_query(name, "/injections"),
overrides: load_query(name, "/overrides"),
}
@ -4,4 +4,4 @@ autoclose_before = ">})"
brackets = [
{ start = "<", end = ">", close = true, newline = true },
]
block_comment = ["<%#", "%>"]
block_comment = ["<%!-- ", " --%>"]

@ -1,10 +1,7 @@
; HEEx delimiters
[
"--%>"
"-->"
"/>"
"<!"
"<!--"
"<"
"</"
"</:"

@ -21,6 +18,9 @@
"<%%="
"<%="
"%>"
"--%>"
"-->"
"<!--"
] @keyword

; HEEx operators are highlighted as such
56
crates/zed/src/languages/javascript/embedding.scm
Normal file

@ -0,0 +1,56 @@
; (internal_module
; "namespace" @context
; name: (_) @name) @item

(enum_declaration
"enum" @context
name: (_) @name) @item

(function_declaration
"async"? @context
"function" @context
name: (_) @name) @item

(interface_declaration
"interface" @context
name: (_) @name) @item

; (program
; (export_statement
; (lexical_declaration
; ["let" "const"] @context
; (variable_declarator
; name: (_) @name) @item)))

(program
(lexical_declaration
["let" "const"] @context
(variable_declarator
name: (_) @name) @item))

(class_declaration
"class" @context
name: (_) @name) @item

(method_definition
[
"get"
"set"
"async"
"*"
"readonly"
"static"
(override_modifier)
(accessibility_modifier)
]* @context
name: (_) @name) @item

; (public_field_definition
; [
; "declare"
; "readonly"
; "abstract"
; "static"
; (accessibility_modifier)
; ]* @context
; name: (_) @name) @item
9
crates/zed/src/languages/python/embedding.scm
Normal file

@ -0,0 +1,9 @@
(class_definition
"class" @context
name: (identifier) @name
) @item

(function_definition
"async"? @context
"def" @context
name: (_) @name) @item
36
crates/zed/src/languages/rust/embedding.scm
Normal file

@ -0,0 +1,36 @@
(struct_item
(visibility_modifier)? @context
"struct" @context
name: (_) @name) @item

(enum_item
(visibility_modifier)? @context
"enum" @context
name: (_) @name) @item

(impl_item
"impl" @context
trait: (_)? @name
"for"? @context
type: (_) @name) @item

(trait_item
(visibility_modifier)? @context
"trait" @context
name: (_) @name) @item

(function_item
(visibility_modifier)? @context
(function_modifiers)? @context
"fn" @context
name: (_) @name) @item

(function_signature_item
(visibility_modifier)? @context
(function_modifiers)? @context
"fn" @context
name: (_) @name) @item

(macro_definition
. "macro_rules!" @context
name: (_) @name) @item
125
crates/zed/src/languages/svelte.rs
Normal file

@ -0,0 +1,125 @@
use anyhow::{anyhow, Result};
use async_trait::async_trait;
use futures::StreamExt;
use language::{LanguageServerName, LspAdapter, LspAdapterDelegate};
use lsp::LanguageServerBinary;
use node_runtime::NodeRuntime;
use serde_json::json;
use smol::fs;
use std::{
any::Any,
ffi::OsString,
path::{Path, PathBuf},
sync::Arc,
};
use util::ResultExt;

const SERVER_PATH: &'static str = "node_modules/svelte-language-server/bin/server.js";

fn server_binary_arguments(server_path: &Path) -> Vec<OsString> {
vec![server_path.into(), "--stdio".into()]
}

pub struct SvelteLspAdapter {
node: Arc<NodeRuntime>,
}

impl SvelteLspAdapter {
pub fn new(node: Arc<NodeRuntime>) -> Self {
SvelteLspAdapter { node }
}
}

#[async_trait]
impl LspAdapter for SvelteLspAdapter {
async fn name(&self) -> LanguageServerName {
LanguageServerName("svelte-language-server".into())
}

async fn fetch_latest_server_version(
&self,
_: &dyn LspAdapterDelegate,
) -> Result<Box<dyn 'static + Any + Send>> {
Ok(Box::new(
self.node
.npm_package_latest_version("svelte-language-server")
.await?,
) as Box<_>)
}

async fn fetch_server_binary(
&self,
version: Box<dyn 'static + Send + Any>,
container_dir: PathBuf,
_: &dyn LspAdapterDelegate,
) -> Result<LanguageServerBinary> {
let version = version.downcast::<String>().unwrap();
let server_path = container_dir.join(SERVER_PATH);

if fs::metadata(&server_path).await.is_err() {
self.node
.npm_install_packages(
&container_dir,
[("svelte-language-server", version.as_str())],
)
.await?;
}

Ok(LanguageServerBinary {
path: self.node.binary_path().await?,
arguments: server_binary_arguments(&server_path),
})
}

async fn cached_server_binary(
&self,
container_dir: PathBuf,
_: &dyn LspAdapterDelegate,
) -> Option<LanguageServerBinary> {
get_cached_server_binary(container_dir, &self.node).await
}

async fn installation_test_binary(
&self,
container_dir: PathBuf,
) -> Option<LanguageServerBinary> {
get_cached_server_binary(container_dir, &self.node).await
}

async fn initialization_options(&self) -> Option<serde_json::Value> {
Some(json!({
"provideFormatter": true
}))
}
}

async fn get_cached_server_binary(
container_dir: PathBuf,
node: &NodeRuntime,
) -> Option<LanguageServerBinary> {
(|| async move {
let mut last_version_dir = None;
let mut entries = fs::read_dir(&container_dir).await?;
while let Some(entry) = entries.next().await {
let entry = entry?;
if entry.file_type().await?.is_dir() {
last_version_dir = Some(entry.path());
}
}
let last_version_dir = last_version_dir.ok_or_else(|| anyhow!("no cached binary"))?;
let server_path = last_version_dir.join(SERVER_PATH);
if server_path.exists() {
Ok(LanguageServerBinary {
path: node.binary_path().await?,
arguments: server_binary_arguments(&server_path),
})
} else {
Err(anyhow!(
"missing executable in directory {:?}",
last_version_dir
))
}
})()
.await
.log_err()
}
18
crates/zed/src/languages/svelte/config.toml
Normal file

@ -0,0 +1,18 @@
name = "Svelte"
path_suffixes = ["svelte"]
line_comment = "// "
autoclose_before = ";:.,=}])>"
brackets = [
{ start = "{", end = "}", close = true, newline = true },
{ start = "[", end = "]", close = true, newline = true },
{ start = "(", end = ")", close = true, newline = true },
{ start = "<", end = ">", close = false, newline = true, not_in = ["string", "comment"] },
{ start = "\"", end = "\"", close = true, newline = false, not_in = ["string"] },
{ start = "'", end = "'", close = true, newline = false, not_in = ["string", "comment"] },
{ start = "`", end = "`", close = true, newline = false, not_in = ["string"] },
{ start = "/*", end = " */", close = true, newline = false, not_in = ["string", "comment"] },
]

[overrides.element]
line_comment = { remove = true }
block_comment = ["{/* ", " */}"]
9
crates/zed/src/languages/svelte/folds.scm
Executable file

@ -0,0 +1,9 @@
[
(style_element)
(script_element)
(element)
(if_statement)
(else_statement)
(each_statement)
(await_statement)
] @fold
42
crates/zed/src/languages/svelte/highlights.scm
Executable file

@ -0,0 +1,42 @@
; Special identifiers
;--------------------

; TODO:
(tag_name) @tag
(attribute_name) @property
(erroneous_end_tag_name) @keyword
(comment) @comment

[
(attribute_value)
(quoted_attribute_value)
] @string

[
(text)
(raw_text_expr)
] @none

[
(special_block_keyword)
(then)
(as)
] @keyword

[
"{"
"}"
] @punctuation.bracket

"=" @operator

[
"<"
">"
"</"
"/>"
"#"
":"
"/"
"@"
] @tag.delimiter
8
crates/zed/src/languages/svelte/indents.scm
Executable file

@ -0,0 +1,8 @@
[
(element)
(if_statement)
(each_statement)
(await_statement)
(script_element)
(style_element)
] @indent
28
crates/zed/src/languages/svelte/injections.scm
Executable file

@ -0,0 +1,28 @@
; injections.scm
; --------------
(script_element
(raw_text) @content
(#set! "language" "javascript"))

((script_element
(start_tag
(attribute
(quoted_attribute_value (attribute_value) @_language)))
(raw_text) @content)
(#eq? @_language "ts")
(#set! "language" "typescript"))

((script_element
(start_tag
(attribute
(quoted_attribute_value (attribute_value) @_language)))
(raw_text) @content)
(#eq? @_language "typescript")
(#set! "language" "typescript"))

(style_element
(raw_text) @content
(#set! "language" "css"))

((raw_text_expr) @content
(#set! "language" "javascript"))
35
crates/zed/src/languages/tsx/embedding.scm
Normal file

@ -0,0 +1,35 @@
(enum_declaration
"enum" @context
name: (_) @name) @item

(function_declaration
"async"? @context
"function" @context
name: (_) @name) @item

(interface_declaration
"interface" @context
name: (_) @name) @item

(program
(lexical_declaration
["let" "const"] @context
(variable_declarator
name: (_) @name) @item))

(class_declaration
"class" @context
name: (_) @name) @item

(method_definition
[
"get"
"set"
"async"
"*"
"readonly"
"static"
(override_modifier)
(accessibility_modifier)
]* @context
name: (_) @name) @item
59
crates/zed/src/languages/typescript/embedding.scm
Normal file

@ -0,0 +1,59 @@
; (internal_module
; "namespace" @context
; name: (_) @name) @item

(enum_declaration
"enum" @context
name: (_) @name) @item

; (type_alias_declaration
; "type" @context
; name: (_) @name) @item

(function_declaration
"async"? @context
"function" @context
name: (_) @name) @item

(interface_declaration
"interface" @context
name: (_) @name) @item

; (export_statement
; (lexical_declaration
; ["let" "const"] @context
; (variable_declarator
; name: (_) @name) @item))

(program
(lexical_declaration
["let" "const"] @context
(variable_declarator
name: (_) @name) @item))

(class_declaration
"class" @context
name: (_) @name) @item

(method_definition
[
"get"
"set"
"async"
"*"
"readonly"
"static"
(override_modifier)
(accessibility_modifier)
]* @context
name: (_) @name) @item

; (public_field_definition
; [
; "declare"
; "readonly"
; "abstract"
; "static"
; (accessibility_modifier)
; ]* @context
; name: (_) @name) @item
@ -157,6 +157,7 @@ fn main() {
project_panel::init(cx);
diagnostics::init(cx);
search::init(cx);
vector_store::init(fs.clone(), http.clone(), languages.clone(), cx);
vim::init(cx);
terminal_view::init(cx);
copilot::init(http.clone(), node_runtime, cx);

@ -1021,7 +1021,7 @@ mod tests {
// Split the pane with the first entry, then open the second entry again.
workspace
.update(cx, |w, cx| {
w.split_pane(w.active_pane().clone(), SplitDirection::Right, cx);
w.split_and_clone(w.active_pane().clone(), SplitDirection::Right, cx);
w.open_path(file2.clone(), None, true, cx)
})
.await

@ -1344,7 +1344,11 @@ mod tests {
cx.dispatch_action(window_id, NewFile);
workspace
.update(cx, |workspace, cx| {
workspace.split_pane(workspace.active_pane().clone(), SplitDirection::Right, cx);
workspace.split_and_clone(
workspace.active_pane().clone(),
SplitDirection::Right,
cx,
);
workspace.open_path((worktree.read(cx).id(), "the-new-name.rs"), None, true, cx)
})
.await
@ -4,6 +4,11 @@

How to build Zed from source for the first time.

## Prerequisites

- Be added to the GitHub organization
- Be added to the Vercel team

## Process

Expect this to take 30min to an hour! Some of these steps will take quite a while based on your connection speed, and how long your first build will be.

@ -13,11 +18,17 @@ Expect this to take 30min to an hour! Some of these steps will take quite a whil
1. Clone the `zed` repo
- `gh repo clone zed-industries/zed`
1. Install Xcode from the macOS App Store
1. Install Xcode command line tools
- `xcode-select --install`
- If xcode-select --print-path prints /Library/Developer/CommandLineTools… run `sudo xcode-select --switch /Applications/Xcode.app/Contents/Developer.`
1. Install [Postgres](https://postgresapp.com)
1. Install rust/rustup
- `curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh`
1. Install the wasm toolchain
- `rustup target add wasm32-wasi`
1. Install Livekit & Foreman
- `brew install livekit`
- `brew install foreman`
1. Generate a GitHub API Key
- Go to https://github.com/settings/tokens and Generate new token
- GitHub currently provides two kinds of tokens:

@ -25,12 +36,26 @@ Expect this to take 30min to an hour! Some of these steps will take quite a whil
Unfortunately, unselecting `repo` scope and selecting each of its inner scopes instead does not allow the token users to read from private repositories
- (not applicable) Fine-grained Tokens, at the moment of writing, did not allow any kind of access of non-owned private repos
- Keep the token in the browser tab/editor for the next two steps
1. (Optional but recommended) Add your GITHUB_TOKEN to your `.zshrc` or `.bashrc` like this: `export GITHUB_TOKEN=yourGithubAPIToken`
1. Ensure the Zed.dev website is checked out in a sibling directory and install its dependencies:
```
cd ..
git clone https://github.com/zed-industries/zed.dev
cd zed.dev && npm install
npm install -g vercel
```
1. Link your zed.dev project to Vercel
- `vercel link`
- Select the `zed-industries` team. If you don't have this, get someone on the team to add you to it.
- Select the `zed.dev` project
1. Run `vercel pull` to pull down the environment variables and project info from Vercel
1. Open Postgres.app
1. From `./path/to/zed/`:
- Run:
- `GITHUB_TOKEN={yourGithubAPIToken} script/bootstrap`
- Replace `{yourGithubAPIToken}` with the API token you generated above.
- Consider removing the token (if it's fine for you to crecreate such tokens during occasional migrations) or store this token somewhere safe (like your Zed 1Password vault).
- Run:
- `GITHUB_TOKEN={yourGithubAPIToken} script/bootstrap`
- Replace `{yourGithubAPIToken}` with the API token you generated above.
- You don't need to include the GITHUB_TOKEN if you exported it above.
- Consider removing the token (if it's fine for you to recreate such tokens during occasional migrations) or store this token somewhere safe (like your Zed 1Password vault).
- If you get:
- ```bash
Error: Cannot install in Homebrew on ARM processor in Intel default prefix (/usr/local)!

@ -51,6 +76,7 @@ Expect this to take 30min to an hour! Some of these steps will take quite a whil
- `cargo run --release`
- If you need to run the collaboration server locally:
- `script/zed-with-local-servers`
- If you need to test collaboration with mutl

## Troubleshooting
@ -114,7 +114,7 @@ export default function editor(): any {
color: foreground(layer, "default"),
},
hovered: {
color: foreground(layer, "variant"),
color: foreground(layer, "on"),
},
},
},