Mirror of https://github.com/zed-industries/zed.git (synced 2025-02-03 17:44:30 +00:00)

Fix interaction with Anthropic models when using it via zed.dev (#15009)

Release Notes:

- N/A

Co-authored-by: Bennet <bennet@zed.dev>

Commit 728650f94a (parent dde9d37cf9)
3 changed files with 22 additions and 30 deletions

@@ -101,7 +101,7 @@ impl LanguageModelCompletionProvider for CloudCompletionProvider {
         request: LanguageModelRequest,
         cx: &AppContext,
     ) -> BoxFuture<'static, Result<usize>> {
-        match request.model {
+        match &request.model {
             LanguageModel::Cloud(CloudModel::Gpt4)
             | LanguageModel::Cloud(CloudModel::Gpt4Turbo)
             | LanguageModel::Cloud(CloudModel::Gpt4Omni)

@@ -118,19 +118,24 @@ impl LanguageModelCompletionProvider for CloudCompletionProvider {
                 count_open_ai_tokens(request, cx.background_executor())
             }
             LanguageModel::Cloud(CloudModel::Custom { name, .. }) => {
-                let request = self.client.request(proto::CountTokensWithLanguageModel {
-                    model: name,
-                    messages: request
-                        .messages
-                        .iter()
-                        .map(|message| message.to_proto())
-                        .collect(),
-                });
-                async move {
-                    let response = request.await?;
-                    Ok(response.token_count as usize)
+                if name.starts_with("anthropic/") {
+                    // Can't find a tokenizer for Anthropic models, so for now just use the same as OpenAI's as an approximation.
+                    count_open_ai_tokens(request, cx.background_executor())
+                } else {
+                    let request = self.client.request(proto::CountTokensWithLanguageModel {
+                        model: name.clone(),
+                        messages: request
+                            .messages
+                            .iter()
+                            .map(|message| message.to_proto())
+                            .collect(),
+                    });
+                    async move {
+                        let response = request.await?;
+                        Ok(response.token_count as usize)
+                    }
+                    .boxed()
                 }
-                .boxed()
             }
             _ => future::ready(Err(anyhow!("invalid model"))).boxed(),
         }

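The new arm counts tokens locally for custom models whose name starts with "anthropic/", reusing the OpenAI tokenizer as an approximation since no Anthropic tokenizer is available, while every other custom model still round-trips through the proto::CountTokensWithLanguageModel request to zed.dev. The match also switches to borrowing the model (match &request.model) and cloning the name, so the whole request can still be moved into count_open_ai_tokens inside the arm. Below is a minimal, self-contained sketch of just that routing decision; the enum and function names are illustrative stand-ins, not Zed APIs.

// Illustrative stand-ins only; these names are not Zed APIs.
#[derive(Debug, PartialEq)]
enum TokenCountStrategy {
    // Approximate locally with the OpenAI tokenizer, since no Anthropic
    // tokenizer is available.
    OpenAiApproximation,
    // Ask the zed.dev server to count tokens for the named model.
    ServerSide { model: String },
}

fn strategy_for_custom_model(name: &str) -> TokenCountStrategy {
    if name.starts_with("anthropic/") {
        TokenCountStrategy::OpenAiApproximation
    } else {
        TokenCountStrategy::ServerSide {
            model: name.to_string(),
        }
    }
}

fn main() {
    assert_eq!(
        strategy_for_custom_model("anthropic/some-model"),
        TokenCountStrategy::OpenAiApproximation
    );
    assert!(matches!(
        strategy_for_custom_model("some-other-model"),
        TokenCountStrategy::ServerSide { .. }
    ));
    println!("token counting routing checks passed");
}

In the real provider the second path returns a boxed future that awaits the server's token count, as the diff above shows; the sketch only captures which path is taken.
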
@@ -1,4 +1,3 @@
-use crate::LanguageModelRequest;
 pub use anthropic::Model as AnthropicModel;
 pub use ollama::Model as OllamaModel;
 pub use open_ai::Model as OpenAiModel;

@@ -88,19 +87,4 @@ impl CloudModel {
             Self::Custom { max_tokens, .. } => max_tokens.unwrap_or(200_000),
         }
     }
-
-    pub fn preprocess_request(&self, request: &mut LanguageModelRequest) {
-        match self {
-            Self::Claude3Opus
-            | Self::Claude3Sonnet
-            | Self::Claude3Haiku
-            | Self::Claude3_5Sonnet => {
-                request.preprocess_anthropic();
-            }
-            Self::Custom { name, .. } if name.starts_with("anthropic/") => {
-                request.preprocess_anthropic();
-            }
-            _ => {}
-        }
-    }
 }

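A side note on the surviving context lines: a custom cloud model's token limit falls back to 200_000 when max_tokens is not configured. A tiny sketch of that default, using a hypothetical, pared-down version of the Custom variant (not the real CloudModel):

// Hypothetical, pared-down stand-in for the Custom variant shown in the
// context lines above; only the max_tokens fallback is illustrated.
enum CloudModel {
    Custom {
        name: String,
        max_tokens: Option<usize>,
    },
}

impl CloudModel {
    fn max_token_count(&self) -> usize {
        match self {
            // Default to 200_000 tokens when no explicit limit is configured.
            Self::Custom { max_tokens, .. } => max_tokens.unwrap_or(200_000),
        }
    }
}

fn main() {
    let model = CloudModel::Custom {
        name: "some-custom-model".into(),
        max_tokens: None,
    };
    let CloudModel::Custom { name, .. } = &model;
    println!("{name}: up to {} tokens", model.max_token_count());
}
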
@@ -45,7 +45,7 @@ impl LanguageModelRequest {
     pub fn preprocess(&mut self) {
         match &self.model {
             LanguageModel::OpenAi(_) => {}
-            LanguageModel::Anthropic(_) => {}
+            LanguageModel::Anthropic(_) => self.preprocess_anthropic(),
             LanguageModel::Ollama(_) => {}
             LanguageModel::Cloud(model) => match model {
                 CloudModel::Claude3Opus

@@ -54,6 +54,9 @@ impl LanguageModelRequest {
                 | CloudModel::Claude3_5Sonnet => {
                     self.preprocess_anthropic();
                 }
+                CloudModel::Custom { name, .. } if name.starts_with("anthropic/") => {
+                    self.preprocess_anthropic();
+                }
                 _ => {}
             },
         }

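Taken together, the last two files consolidate all Anthropic-specific request preprocessing into LanguageModelRequest::preprocess: the native Anthropic variant now calls preprocess_anthropic(), and cloud models whose custom name starts with "anthropic/" get the same treatment, replacing the deleted CloudModel::preprocess_request. The sketch below captures only that routing predicate; the types are hypothetical stand-ins, and preprocess_anthropic itself (the Anthropic-specific request normalization) is out of scope.

// Illustrative stand-ins only; these are not the Zed types.
#[allow(dead_code)]
enum CloudModel {
    Claude3Opus,
    Claude3Sonnet,
    Claude3Haiku,
    Claude3_5Sonnet,
    Custom { name: String },
}

#[allow(dead_code)]
enum LanguageModel {
    OpenAi,
    Anthropic,
    Ollama,
    Cloud(CloudModel),
}

// True when a request for this model should go through the Anthropic-specific
// preprocessing path (the preprocess_anthropic calls in the diff).
fn needs_anthropic_preprocessing(model: &LanguageModel) -> bool {
    match model {
        LanguageModel::Anthropic => true,
        LanguageModel::Cloud(cloud) => match cloud {
            CloudModel::Claude3Opus
            | CloudModel::Claude3Sonnet
            | CloudModel::Claude3Haiku
            | CloudModel::Claude3_5Sonnet => true,
            CloudModel::Custom { name } => name.starts_with("anthropic/"),
        },
        _ => false,
    }
}

fn main() {
    assert!(needs_anthropic_preprocessing(&LanguageModel::Anthropic));
    assert!(needs_anthropic_preprocessing(&LanguageModel::Cloud(
        CloudModel::Custom { name: "anthropic/some-model".into() },
    )));
    assert!(!needs_anthropic_preprocessing(&LanguageModel::Cloud(
        CloudModel::Custom { name: "some-other-model".into() },
    )));
    assert!(!needs_anthropic_preprocessing(&LanguageModel::Ollama));
    println!("anthropic preprocessing routing checks passed");
}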