Cargo.lock
@@ -84,7 +84,6 @@ dependencies = [
"gpui",
"isahc",
"language",
- "lazy_static",
"log",
"matrixmultiply",
"ordered-float 2.10.0",
Marshall Bowers created this pull request
This PR replaces a `lazy_static!` usage in the `ai` crate with
`OnceLock` from the standard library.
This allows us to drop the `lazy_static` dependency from this crate.
Release Notes:
- N/A
Cargo.lock | 1 -
crates/ai/Cargo.toml | 1 -
crates/ai/src/providers/open_ai/embedding.rs | 8 ++++----
crates/ai/src/providers/open_ai/model.rs | 6 +++---
4 files changed, 7 insertions(+), 9 deletions(-)
@@ -84,7 +84,6 @@ dependencies = [
"gpui",
"isahc",
"language",
- "lazy_static",
"log",
"matrixmultiply",
"ordered-float 2.10.0",
@@ -20,7 +20,6 @@ futures.workspace = true
gpui.workspace = true
isahc.workspace = true
language.workspace = true
-lazy_static.workspace = true
log.workspace = true
matrixmultiply = "0.3.7"
ordered-float.workspace = true
@@ -8,7 +8,6 @@ use gpui::BackgroundExecutor;
use isahc::http::StatusCode;
use isahc::prelude::Configurable;
use isahc::{AsyncBody, Response};
-use lazy_static::lazy_static;
use parking_lot::{Mutex, RwLock};
use parse_duration::parse;
use postage::watch;
@@ -16,7 +15,7 @@ use serde::{Deserialize, Serialize};
use serde_json;
use std::env;
use std::ops::Add;
-use std::sync::Arc;
+use std::sync::{Arc, OnceLock};
use std::time::{Duration, Instant};
use tiktoken_rs::{cl100k_base, CoreBPE};
use util::http::{HttpClient, Request};
@@ -29,8 +28,9 @@ use crate::providers::open_ai::OpenAiLanguageModel;
use crate::providers::open_ai::OPEN_AI_API_URL;
-lazy_static! {
- pub(crate) static ref OPEN_AI_BPE_TOKENIZER: CoreBPE = cl100k_base().unwrap();
+pub(crate) fn open_ai_bpe_tokenizer() -> &'static CoreBPE {
+ static OPEN_AI_BPE_TOKENIZER: OnceLock<CoreBPE> = OnceLock::new();
+ OPEN_AI_BPE_TOKENIZER.get_or_init(|| cl100k_base().unwrap())
}
#[derive(Clone)]
@@ -3,7 +3,7 @@ use tiktoken_rs::CoreBPE;
use crate::models::{LanguageModel, TruncationDirection};
-use super::OPEN_AI_BPE_TOKENIZER;
+use super::open_ai_bpe_tokenizer;
#[derive(Clone)]
pub struct OpenAiLanguageModel {
@@ -13,8 +13,8 @@ pub struct OpenAiLanguageModel {
impl OpenAiLanguageModel {
pub fn load(model_name: &str) -> Self {
- let bpe =
- tiktoken_rs::get_bpe_from_model(model_name).unwrap_or(OPEN_AI_BPE_TOKENIZER.to_owned());
+ let bpe = tiktoken_rs::get_bpe_from_model(model_name)
+ .unwrap_or(open_ai_bpe_tokenizer().to_owned());
OpenAiLanguageModel {
name: model_name.to_string(),
bpe: Some(bpe),