Skip to content

Commit

Permalink
Fix bugs preventing non-staff users from using LLM service (#16307)
Browse files Browse the repository at this point in the history
- db deadlock in GetLlmToken for non-staff users
- typo in allowed model name for non-staff users

Release Notes:

- N/A

---------

Co-authored-by: Marshall <[email protected]>
Co-authored-by: Joseph <[email protected]>
  • Loading branch information
3 people authored Aug 15, 2024
1 parent 931883a commit 6b7664e
Show file tree
Hide file tree
Showing 2 changed files with 10 additions and 12 deletions.
8 changes: 4 additions & 4 deletions crates/collab/src/llm/authorization.rs
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ fn authorize_access_to_model(
}

match (provider, model) {
(LanguageModelProvider::Anthropic, model) if model.starts_with("claude-3.5-sonnet") => {
(LanguageModelProvider::Anthropic, model) if model.starts_with("claude-3-5-sonnet") => {
Ok(())
}
_ => Err(Error::http(
Expand Down Expand Up @@ -240,14 +240,14 @@ mod tests {
(
Plan::ZedPro,
LanguageModelProvider::Anthropic,
"claude-3.5-sonnet",
"claude-3-5-sonnet",
true,
),
        // Free plan should have access to claude-3-5-sonnet
(
Plan::Free,
LanguageModelProvider::Anthropic,
"claude-3.5-sonnet",
"claude-3-5-sonnet",
true,
),
// Pro plan should NOT have access to other Anthropic models
Expand Down Expand Up @@ -303,7 +303,7 @@ mod tests {

// Staff should have access to all models
let test_cases = vec![
(LanguageModelProvider::Anthropic, "claude-3.5-sonnet"),
(LanguageModelProvider::Anthropic, "claude-3-5-sonnet"),
(LanguageModelProvider::Anthropic, "claude-2"),
(LanguageModelProvider::Anthropic, "claude-123-agi"),
(LanguageModelProvider::OpenAi, "gpt-4"),
Expand Down
14 changes: 6 additions & 8 deletions crates/collab/src/rpc.rs
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ use std::{
time::{Duration, Instant},
};
use time::OffsetDateTime;
use tokio::sync::{watch, Semaphore};
use tokio::sync::{watch, MutexGuard, Semaphore};
use tower::ServiceBuilder;
use tracing::{
field::{self},
Expand Down Expand Up @@ -192,7 +192,7 @@ impl Session {
}
}

pub async fn current_plan(&self) -> anyhow::Result<proto::Plan> {
pub async fn current_plan(&self, db: MutexGuard<'_, DbHandle>) -> anyhow::Result<proto::Plan> {
if self.is_staff() {
return Ok(proto::Plan::ZedPro);
}
Expand All @@ -201,7 +201,6 @@ impl Session {
return Ok(proto::Plan::Free);
};

let db = self.db().await;
if db.has_active_billing_subscription(user_id).await? {
Ok(proto::Plan::ZedPro)
} else {
Expand Down Expand Up @@ -3500,7 +3499,7 @@ fn should_auto_subscribe_to_channels(version: ZedVersion) -> bool {
}

async fn update_user_plan(_user_id: UserId, session: &Session) -> Result<()> {
let plan = session.current_plan().await?;
let plan = session.current_plan(session.db().await).await?;

session
.peer
Expand Down Expand Up @@ -4503,7 +4502,7 @@ async fn count_language_model_tokens(
};
authorize_access_to_legacy_llm_endpoints(&session).await?;

let rate_limit: Box<dyn RateLimit> = match session.current_plan().await? {
let rate_limit: Box<dyn RateLimit> = match session.current_plan(session.db().await).await? {
proto::Plan::ZedPro => Box::new(ZedProCountLanguageModelTokensRateLimit),
proto::Plan::Free => Box::new(FreeCountLanguageModelTokensRateLimit),
};
Expand Down Expand Up @@ -4623,7 +4622,7 @@ async fn compute_embeddings(
let api_key = api_key.context("no OpenAI API key configured on the server")?;
authorize_access_to_legacy_llm_endpoints(&session).await?;

let rate_limit: Box<dyn RateLimit> = match session.current_plan().await? {
let rate_limit: Box<dyn RateLimit> = match session.current_plan(session.db().await).await? {
proto::Plan::ZedPro => Box::new(ZedProComputeEmbeddingsRateLimit),
proto::Plan::Free => Box::new(FreeComputeEmbeddingsRateLimit),
};
Expand Down Expand Up @@ -4940,11 +4939,10 @@ async fn get_llm_api_token(
if Utc::now().naive_utc() - account_created_at < MIN_ACCOUNT_AGE_FOR_LLM_USE {
Err(anyhow!("account too young"))?
}

let token = LlmTokenClaims::create(
user.id,
session.is_staff(),
session.current_plan().await?,
session.current_plan(db).await?,
&session.app_state.config,
)?;
response.send(proto::GetLlmTokenResponse { token })?;
Expand Down

0 comments on commit 6b7664e

Please sign in to comment.