From 0d0ce95eaebf4e708caec8211b06f2a12ccc4c7e Mon Sep 17 00:00:00 2001
From: Marshall Bowers
Date: Thu, 29 Feb 2024 23:37:20 -0500
Subject: [PATCH] Replace `lazy_static!` with `OnceLock` in `ai` crate (#8647)

This PR replaces a `lazy_static!` usage in the `ai` crate with `OnceLock`
from the standard library.

This allows us to drop the `lazy_static` dependency from this crate.

Release Notes:

- N/A
---
 Cargo.lock                                   | 1 -
 crates/ai/Cargo.toml                         | 1 -
 crates/ai/src/providers/open_ai/embedding.rs | 8 ++++----
 crates/ai/src/providers/open_ai/model.rs     | 6 +++---
 4 files changed, 7 insertions(+), 9 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 7c77215e07bf9890726868ba84bc4accb37ecd0a..57b487470a89a7a57634a64dfddb2b6862a32e5e 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -84,7 +84,6 @@ dependencies = [
  "gpui",
  "isahc",
  "language",
- "lazy_static",
  "log",
  "matrixmultiply",
  "ordered-float 2.10.0",
diff --git a/crates/ai/Cargo.toml b/crates/ai/Cargo.toml
index 1aa2f6d48ebbefa10e990ef74e7d9577ff8a1448..726c7329dc5da2d10155bb3dcf2858d2ca250894 100644
--- a/crates/ai/Cargo.toml
+++ b/crates/ai/Cargo.toml
@@ -20,7 +20,6 @@ futures.workspace = true
 gpui.workspace = true
 isahc.workspace = true
 language.workspace = true
-lazy_static.workspace = true
 log.workspace = true
 matrixmultiply = "0.3.7"
 ordered-float.workspace = true
diff --git a/crates/ai/src/providers/open_ai/embedding.rs b/crates/ai/src/providers/open_ai/embedding.rs
index 588861a972a5f1dfb82495e482b264b52db3859e..ddff082359df30c0c3c2e99e8104055cd5ed7143 100644
--- a/crates/ai/src/providers/open_ai/embedding.rs
+++ b/crates/ai/src/providers/open_ai/embedding.rs
@@ -8,7 +8,6 @@ use gpui::BackgroundExecutor;
 use isahc::http::StatusCode;
 use isahc::prelude::Configurable;
 use isahc::{AsyncBody, Response};
-use lazy_static::lazy_static;
 use parking_lot::{Mutex, RwLock};
 use parse_duration::parse;
 use postage::watch;
@@ -16,7 +15,7 @@ use serde::{Deserialize, Serialize};
 use serde_json;
 use std::env;
 use std::ops::Add;
-use std::sync::Arc;
+use std::sync::{Arc, OnceLock};
 use std::time::{Duration, Instant};
 use tiktoken_rs::{cl100k_base, CoreBPE};
 use util::http::{HttpClient, Request};
@@ -29,8 +28,9 @@ use crate::providers::open_ai::OpenAiLanguageModel;
 use crate::providers::open_ai::OPEN_AI_API_URL;
 
-lazy_static! {
-    pub(crate) static ref OPEN_AI_BPE_TOKENIZER: CoreBPE = cl100k_base().unwrap();
+pub(crate) fn open_ai_bpe_tokenizer() -> &'static CoreBPE {
+    static OPEN_AI_BPE_TOKENIZER: OnceLock<CoreBPE> = OnceLock::new();
+    OPEN_AI_BPE_TOKENIZER.get_or_init(|| cl100k_base().unwrap())
 }
 
 #[derive(Clone)]
diff --git a/crates/ai/src/providers/open_ai/model.rs b/crates/ai/src/providers/open_ai/model.rs
index 21ea0334bdcfcb0c52076bc400390f76176ca84c..f2f75977e488e655b702dc2875d22480280ea306 100644
--- a/crates/ai/src/providers/open_ai/model.rs
+++ b/crates/ai/src/providers/open_ai/model.rs
@@ -3,7 +3,7 @@ use tiktoken_rs::CoreBPE;
 
 use crate::models::{LanguageModel, TruncationDirection};
 
-use super::OPEN_AI_BPE_TOKENIZER;
+use super::open_ai_bpe_tokenizer;
 
 #[derive(Clone)]
 pub struct OpenAiLanguageModel {
@@ -13,8 +13,8 @@ pub struct OpenAiLanguageModel {
 
 impl OpenAiLanguageModel {
     pub fn load(model_name: &str) -> Self {
-        let bpe =
-            tiktoken_rs::get_bpe_from_model(model_name).unwrap_or(OPEN_AI_BPE_TOKENIZER.to_owned());
+        let bpe = tiktoken_rs::get_bpe_from_model(model_name)
+            .unwrap_or(open_ai_bpe_tokenizer().to_owned());
         OpenAiLanguageModel {
             name: model_name.to_string(),
             bpe: Some(bpe),
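
For readers unfamiliar with the pattern, here is a minimal, self-contained sketch of the `lazy_static!` → `OnceLock` conversion this patch applies. `OnceLock` was stabilized in Rust 1.70, so it needs no extra dependency. The `Tokenizer` struct, `build_tokenizer` helper, and `main` harness below are illustrative stand-ins (not the Zed code); in the patch itself the cached value is a `tiktoken_rs::CoreBPE` built by `cl100k_base()`.

```rust
use std::sync::OnceLock;

// Stand-in for the expensive-to-construct value (`CoreBPE` in the patch).
#[derive(Clone, Debug)]
struct Tokenizer {
    name: String,
}

// Stand-in for `cl100k_base().unwrap()`: runs once, on first use.
fn build_tokenizer() -> Tokenizer {
    Tokenizer {
        name: "cl100k_base".to_string(),
    }
}

// Before: `lazy_static! { static ref TOKENIZER: Tokenizer = build_tokenizer(); }`
// After: an accessor function wrapping a `OnceLock`. The first call initializes
// the static; every call returns the same `'static` reference.
fn tokenizer() -> &'static Tokenizer {
    static TOKENIZER: OnceLock<Tokenizer> = OnceLock::new();
    TOKENIZER.get_or_init(build_tokenizer)
}

fn main() {
    let a = tokenizer();
    let b = tokenizer();
    // Both calls hand back the same cached instance.
    assert!(std::ptr::eq(a, b));

    // Mirrors the patch's fallback in `OpenAiLanguageModel::load`: clone the
    // shared value when a model-specific one can't be loaded.
    let fallback = tokenizer().clone();
    println!("{:?}", fallback.name);
}
```

Call sites change from dereferencing a `lazy_static` item (`OPEN_AI_BPE_TOKENIZER.to_owned()`) to calling the accessor (`open_ai_bpe_tokenizer().to_owned()`), which is the only API difference visible in the diff above.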