Detailed changes
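Across these hunks, the separate `encoding: Option<Encoding>`, `force: bool`, and `detect_utf16: bool` parameters threaded through `open_buffer`, `load_file`, `load_with_encoding`, `to_utf8`, and `LocalFile::load` are collapsed into a single `&EncodingOptions` argument; call sites that previously spelled out `None, false, true` (or similar literals) now pass `&Default::default()`. The struct definition itself is not shown in these hunks, but the `Clone` impl and the field-by-field copy in `File::load` below imply roughly this shape (a reconstruction, not the actual definition):

```rust
use std::sync::{Arc, atomic::AtomicBool};

// Reconstructed from the `Clone` impl and the manual copy in `File::load` below;
// field visibility and the `Default` values are assumptions, not shown in these hunks.
pub struct EncodingOptions {
    pub encoding: Arc<Encoding>,   // target encoding, mutated in place via `get`/`set`
    pub force: AtomicBool,         // decode even when conversion is lossy (semantics assumed)
    pub detect_utf16: AtomicBool,  // sniff UTF-16 before decoding (semantics assumed)
}
```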
@@ -1248,7 +1248,7 @@ fn full_mention_for_directory(
worktree_id,
path: worktree_path,
};
- buffer_store.open_buffer(project_path, None, false, true, cx)
+ buffer_store.open_buffer(project_path, &Default::default(), cx)
})
});
@@ -287,7 +287,7 @@ impl DirectoryContextHandle {
let open_task = project.update(cx, |project, cx| {
project.buffer_store().update(cx, |buffer_store, cx| {
let project_path = ProjectPath { worktree_id, path };
- buffer_store.open_buffer(project_path, None, false, true, cx)
+ buffer_store.open_buffer(project_path, &Default::default(), cx)
})
});
@@ -1241,7 +1241,7 @@ async fn get_copilot_lsp(fs: Arc<dyn Fs>, node_runtime: NodeRuntime) -> anyhow::
#[cfg(test)]
mod tests {
use super::*;
- use encodings::Encoding;
+ use encodings::{Encoding, EncodingOptions};
use gpui::TestAppContext;
use util::{path, paths::PathStyle, rel_path::rel_path};
@@ -1455,9 +1455,7 @@ mod tests {
fn load(
&self,
_: &App,
- _: Encoding,
- _: bool,
- _: bool,
+ _: &EncodingOptions,
_: Option<Arc<Encoding>>,
) -> Task<Result<String>> {
unimplemented!()
@@ -523,7 +523,7 @@ impl SyntaxIndex {
};
let snapshot_task = worktree.update(cx, |worktree, cx| {
- let load_task = worktree.load_file(&project_path.path, None, false, true, None, cx);
+ let load_task = worktree.load_file(&project_path.path, &Default::default(), None, cx);
let worktree_abs_path = worktree.abs_path();
cx.spawn(async move |_this, cx| {
@@ -34,6 +34,12 @@ impl Default for Encoding {
}
}
+impl From<&'static encoding_rs::Encoding> for Encoding {
+ fn from(encoding: &'static encoding_rs::Encoding) -> Self {
+ Encoding::new(encoding)
+ }
+}
+
unsafe impl Send for Encoding {}
unsafe impl Sync for Encoding {}
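The new `From` impl lets a `&'static encoding_rs::Encoding` be converted directly into the crate's `Encoding` wrapper, which the `Clone` impl later in this section relies on (`self.encoding.get().into()`). For example:

```rust
// `encoding_rs::WINDOWS_1252` is a `&'static encoding_rs::Encoding`;
// the new `From` impl simply wraps it with `Encoding::new`.
let latin1: Encoding = encoding_rs::WINDOWS_1252.into();
```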
@@ -120,13 +126,19 @@ impl Encoding {
/// Convert a byte vector from a specified encoding to a UTF-8 string.
pub async fn to_utf8(
input: Vec<u8>,
- encoding: Encoding,
- force: bool,
- detect_utf16: bool,
+ options: &EncodingOptions,
buffer_encoding: Option<Arc<Encoding>>,
) -> anyhow::Result<String> {
- encoding
- .decode(input, force, detect_utf16, buffer_encoding)
+ options
+ .encoding
+ .decode(
+ input,
+ options.force.load(std::sync::atomic::Ordering::Acquire),
+ options
+ .detect_utf16
+ .load(std::sync::atomic::Ordering::Acquire),
+ buffer_encoding,
+ )
.await
}
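`to_utf8` now reads `force` and `detect_utf16` out of the options with `Ordering::Acquire` loads instead of taking them as separate booleans. A minimal sketch of a call, assuming `EncodingOptions::default()` provides usable defaults (the `Default` body is not shown in these hunks):

```rust
// Sketch only: decode raw bytes (`bytes: Vec<u8>`) using the default options.
let options = EncodingOptions::default();
let text = encodings::to_utf8(bytes, &options, None).await?;
```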
@@ -162,3 +174,15 @@ impl Default for EncodingOptions {
}
}
}
+
+impl Clone for EncodingOptions {
+ fn clone(&self) -> Self {
+ EncodingOptions {
+ encoding: Arc::new(self.encoding.get().into()),
+ force: AtomicBool::new(self.force.load(std::sync::atomic::Ordering::Acquire)),
+ detect_utf16: AtomicBool::new(
+ self.detect_utf16.load(std::sync::atomic::Ordering::Acquire),
+ ),
+ }
+ }
+}
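The hand-written `Clone` is required because `AtomicBool` does not implement `Clone`; the impl snapshots both flags with `Acquire` loads and re-wraps the current target encoding in a fresh `Arc` via the `From` impl added above.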
@@ -58,11 +58,9 @@ use smol::io::AsyncReadExt;
#[cfg(any(test, feature = "test-support"))]
use std::ffi::OsStr;
+use encodings::{Encoding, EncodingOptions, from_utf8, to_utf8};
#[cfg(any(test, feature = "test-support"))]
pub use fake_git_repo::{LOAD_HEAD_TEXT_TASK, LOAD_INDEX_TEXT_TASK};
-use encodings::Encoding;
-use encodings::from_utf8;
-use encodings::to_utf8;
pub trait Watcher: Send + Sync {
fn add(&self, path: &Path) -> Result<()>;
@@ -122,19 +120,10 @@ pub trait Fs: Send + Sync {
async fn load_with_encoding(
&self,
path: &Path,
- encoding: Encoding,
- force: bool,
- detect_utf16: bool,
+ options: &EncodingOptions,
buffer_encoding: Option<Arc<Encoding>>,
) -> Result<String> {
- Ok(to_utf8(
- self.load_bytes(path).await?,
- encoding,
- force,
- detect_utf16,
- buffer_encoding,
- )
- .await?)
+ Ok(to_utf8(self.load_bytes(path).await?, options, buffer_encoding).await?)
}
async fn load_bytes(&self, path: &Path) -> Result<Vec<u8>>;
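On the `Fs` trait, the provided `load_with_encoding` method now forwards the whole options struct to `to_utf8`. A hypothetical caller, assuming an `fs: Arc<dyn Fs>` handle and a made-up path:

```rust
use std::path::Path;

// Sketch: read the file's bytes and decode them according to `options`.
let options = EncodingOptions::default();
let contents = fs
    .load_with_encoding(Path::new("/tmp/notes.txt"), &options, None)
    .await?;
```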
@@ -21,7 +21,7 @@ use anyhow::{Context as _, Result};
use clock::Lamport;
pub use clock::ReplicaId;
use collections::HashMap;
-use encodings::Encoding;
+use encodings::{Encoding, EncodingOptions};
use fs::MTime;
use futures::channel::oneshot;
use gpui::{
@@ -422,9 +422,7 @@ pub trait LocalFile: File {
fn load(
&self,
cx: &App,
- encoding: Encoding,
- force: bool,
- detect_utf16: bool,
+ options: &EncodingOptions,
buffer_encoding: Option<Arc<Encoding>>,
) -> Task<Result<String>>;
@@ -1372,6 +1370,8 @@ impl Buffer {
let encoding = (*self.encoding).clone();
let buffer_encoding = self.encoding.clone();
+ let options = EncodingOptions::default();
+ options.encoding.set(encoding.get());
let prev_version = self.text.version();
self.reload_task = Some(cx.spawn(async move |this, cx| {
@@ -1379,7 +1379,7 @@ impl Buffer {
let file = this.file.as_ref()?.as_local()?;
Some((file.disk_state().mtime(), {
- file.load(cx, encoding, false, true, Some(buffer_encoding))
+ file.load(cx, &options, Some(buffer_encoding))
}))
})?
else {
@@ -5271,9 +5271,7 @@ impl LocalFile for TestFile {
fn load(
&self,
_cx: &App,
- _encoding: Encoding,
- _force: bool,
- _detect_utf16: bool,
+ _options: &EncodingOptions,
_buffer_encoding: Option<Arc<Encoding>>,
) -> Task<Result<String>> {
unimplemented!()
@@ -57,7 +57,7 @@ impl ContextProvider for JsonTaskProvider {
let contents = file
.worktree
.update(cx, |this, cx| {
- this.load_file(&file.path, None, false, true, None, cx)
+ this.load_file(&file.path, &Default::default(), None, cx)
})
.ok()?
.await
@@ -7,7 +7,7 @@ use crate::{
use anyhow::{Context as _, Result, anyhow};
use client::Client;
use collections::{HashMap, HashSet, hash_map};
-use encodings::Encoding;
+use encodings::EncodingOptions;
use fs::Fs;
use futures::StreamExt;
use futures::{Future, FutureExt as _, channel::oneshot, future::Shared};
@@ -629,23 +629,17 @@ impl LocalBufferStore {
&self,
path: Arc<RelPath>,
worktree: Entity<Worktree>,
- encoding: Option<Encoding>,
- force: bool,
- detect_utf16: bool,
+ options: &EncodingOptions,
cx: &mut Context<BufferStore>,
) -> Task<Result<Entity<Buffer>>> {
+ let options = options.clone();
+ let encoding = options.encoding.clone();
+
let load_buffer = worktree.update(cx, |worktree, cx| {
let reservation = cx.reserve_entity();
let buffer_id = BufferId::from(reservation.entity_id().as_non_zero_u64());
- let load_file_task = worktree.load_file(
- path.as_ref(),
- encoding.clone(),
- force,
- detect_utf16,
- None,
- cx,
- );
+ let load_file_task = worktree.load_file(path.as_ref(), &options, None, cx);
cx.spawn(async move |_, cx| {
let loaded_file = load_file_task.await?;
@@ -682,11 +676,7 @@ impl LocalBufferStore {
entry_id: None,
is_local: true,
is_private: false,
- encoding: Some(Arc::new(if let Some(encoding) = &encoding {
- encoding.clone()
- } else {
- Encoding::default()
- })),
+ encoding: Some(encoding.clone()),
})),
Capability::ReadWrite,
)
@@ -714,11 +704,7 @@ impl LocalBufferStore {
anyhow::Ok(())
})??;
- buffer.update(cx, |buffer, _| {
- buffer
- .encoding
- .set(encoding.unwrap_or(Encoding::default()).get())
- })?;
+ buffer.update(cx, |buffer, _| buffer.encoding.set(encoding.get()))?;
Ok(buffer)
})
@@ -850,9 +836,7 @@ impl BufferStore {
pub fn open_buffer(
&mut self,
project_path: ProjectPath,
- encoding: Option<Encoding>,
- force: bool,
- detect_utf16: bool,
+ options: &EncodingOptions,
cx: &mut Context<Self>,
) -> Task<Result<Entity<Buffer>>> {
if let Some(buffer) = self.get_by_path(&project_path) {
@@ -876,9 +860,7 @@ impl BufferStore {
return Task::ready(Err(anyhow!("no such worktree")));
};
let load_buffer = match &self.state {
- BufferStoreState::Local(this) => {
- this.open_buffer(path, worktree, encoding, force, detect_utf16, cx)
- }
+ BufferStoreState::Local(this) => this.open_buffer(path, worktree, options, cx),
BufferStoreState::Remote(this) => this.open_buffer(path, worktree, cx),
};
@@ -1191,7 +1173,7 @@ impl BufferStore {
let buffers = this.update(cx, |this, cx| {
project_paths
.into_iter()
- .map(|project_path| this.open_buffer(project_path, None, false, true, cx))
+ .map(|project_path| this.open_buffer(project_path, &Default::default(), cx))
.collect::<Vec<_>>()
})?;
for buffer_task in buffers {
@@ -796,7 +796,7 @@ impl BreakpointStore {
worktree_id: worktree.read(cx).id(),
path: relative_path,
};
- this.open_buffer(path, None, false, true, cx)
+ this.open_buffer(path, &Default::default(), cx)
})?
.await;
let Ok(buffer) = buffer else {
@@ -8336,7 +8336,7 @@ impl LspStore {
lsp_store
.update(cx, |lsp_store, cx| {
lsp_store.buffer_store().update(cx, |buffer_store, cx| {
- buffer_store.open_buffer(project_path, None, false, true, cx)
+ buffer_store.open_buffer(project_path, &Default::default(), cx)
})
})?
.await
@@ -91,7 +91,7 @@ pub fn cancel_flycheck(
let buffer = buffer_path.map(|buffer_path| {
project.update(cx, |project, cx| {
project.buffer_store().update(cx, |buffer_store, cx| {
- buffer_store.open_buffer(buffer_path, None, false, true, cx)
+ buffer_store.open_buffer(buffer_path, &Default::default(), cx)
})
})
});
@@ -140,7 +140,7 @@ pub fn run_flycheck(
let buffer = buffer_path.map(|buffer_path| {
project.update(cx, |project, cx| {
project.buffer_store().update(cx, |buffer_store, cx| {
- buffer_store.open_buffer(buffer_path, None, false, true, cx)
+ buffer_store.open_buffer(buffer_path, &Default::default(), cx)
})
})
});
@@ -198,7 +198,7 @@ pub fn clear_flycheck(
let buffer = buffer_path.map(|buffer_path| {
project.update(cx, |project, cx| {
project.buffer_store().update(cx, |buffer_store, cx| {
- buffer_store.open_buffer(buffer_path, None, false, true, cx)
+ buffer_store.open_buffer(buffer_path, &Default::default(), cx)
})
})
});
@@ -2720,13 +2720,7 @@ impl Project {
}
self.buffer_store.update(cx, |buffer_store, cx| {
- buffer_store.open_buffer(
- path.into(),
- Some((*self.encoding_options.encoding).clone()),
- *self.encoding_options.force.get_mut(),
- *self.encoding_options.detect_utf16.get_mut(),
- cx,
- )
+ buffer_store.open_buffer(path.into(), &self.encoding_options, cx)
})
}
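`Project::open_buffer` now hands its stored `encoding_options` straight to the buffer store instead of unpacking the three fields. A caller wanting something other than the defaults could mutate a fresh options value through the same interior-mutability pattern the reload path uses (`options.encoding.set(...)`); this is a sketch under that assumption, with `project_path` standing in for an existing value:

```rust
use std::sync::atomic::Ordering;

// Hypothetical: open a buffer decoded as Windows-1252 rather than the defaults.
let options = EncodingOptions::default();
options.encoding.set(encoding_rs::WINDOWS_1252);
options.detect_utf16.store(false, Ordering::Release);

let open_task = buffer_store.update(cx, |buffer_store, cx| {
    buffer_store.open_buffer(project_path, &options, cx)
});
```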
@@ -5409,7 +5403,7 @@ impl Project {
cx.spawn(async move |cx| {
let file = worktree
.update(cx, |worktree, cx| {
- worktree.load_file(&rel_path, None, false, true, None, cx)
+ worktree.load_file(&rel_path, &Default::default(), None, cx)
})?
.await
.context("Failed to load settings file")?;
@@ -3880,9 +3880,7 @@ async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
.update(cx, |worktree, cx| {
worktree.load_file(
rel_path("dir1/dir2/dir3/test.txt"),
- None,
- false,
- true,
+ &Default::default(),
None,
cx,
)
@@ -3932,7 +3930,12 @@ async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
assert_eq!(
worktree
.update(cx, |worktree, cx| {
- worktree.load_file(rel_path("dir1/dir2/test.txt"), None, false, true, None, cx)
+ worktree.load_file(
+ rel_path("dir1/dir2/test.txt"),
+ &Default::default(),
+ None,
+ cx,
+ )
})
.await
.unwrap()
@@ -9012,9 +9015,7 @@ async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
tree.update(cx, |tree, cx| {
tree.load_file(
rel_path("project/target/debug/important_text.txt"),
- None,
- false,
- true,
+ &Default::default(),
None,
cx,
)
@@ -9180,9 +9181,7 @@ async fn test_odd_events_for_ignored_dirs(
tree.update(cx, |tree, cx| {
tree.load_file(
rel_path("target/debug/foo.txt"),
- None,
- false,
- true,
+ &Default::default(),
None,
cx,
)
@@ -506,7 +506,7 @@ impl HeadlessProject {
let (buffer_store, buffer) = this.update(&mut cx, |this, cx| {
let buffer_store = this.buffer_store.clone();
let buffer = this.buffer_store.update(cx, |buffer_store, cx| {
- buffer_store.open_buffer(ProjectPath { worktree_id, path }, None, false, true, cx)
+ buffer_store.open_buffer(ProjectPath { worktree_id, path }, &Default::default(), cx)
});
anyhow::Ok((buffer_store, buffer))
})??;
@@ -597,9 +597,7 @@ impl HeadlessProject {
worktree_id: worktree.read(cx).id(),
path: path,
},
- None,
- false,
- true,
+ &Default::default(),
cx,
)
});
@@ -7,7 +7,7 @@ use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
use anyhow::{Context as _, Result, anyhow};
use clock::ReplicaId;
use collections::{HashMap, HashSet, VecDeque};
-use encodings::Encoding;
+use encodings::{Encoding, EncodingOptions};
use fs::{Fs, MTime, PathEvent, RemoveOptions, Watcher, copy_recursive, read_dir_items};
use futures::{
FutureExt as _, Stream, StreamExt,
@@ -707,16 +707,12 @@ impl Worktree {
pub fn load_file(
&self,
path: &RelPath,
- encoding: Option<Encoding>,
- force: bool,
- detect_utf16: bool,
+ options: &EncodingOptions,
buffer_encoding: Option<Arc<Encoding>>,
cx: &Context<Worktree>,
) -> Task<Result<LoadedFile>> {
match self {
- Worktree::Local(this) => {
- this.load_file(path, encoding, force, detect_utf16, buffer_encoding, cx)
- }
+ Worktree::Local(this) => this.load_file(path, options, buffer_encoding, cx),
Worktree::Remote(_) => {
Task::ready(Err(anyhow!("remote worktrees can't yet load files")))
}
@@ -1327,9 +1323,7 @@ impl LocalWorktree {
fn load_file(
&self,
path: &RelPath,
- encoding: Option<Encoding>,
- force: bool,
- detect_utf16: bool,
+ options: &EncodingOptions,
buffer_encoding: Option<Arc<Encoding>>,
cx: &Context<Worktree>,
) -> Task<Result<LoadedFile>> {
@@ -1338,6 +1332,8 @@ impl LocalWorktree {
let fs = self.fs.clone();
let entry = self.refresh_entry(path.clone(), None, cx);
let is_private = self.is_path_private(path.as_ref());
+ let options = options.clone();
+ let encoding = options.encoding.clone();
let this = cx.weak_entity();
cx.background_spawn(async move {
@@ -1356,17 +1352,7 @@ impl LocalWorktree {
}
}
let text = fs
- .load_with_encoding(
- &abs_path,
- if let Some(ref encoding) = encoding {
- Encoding::new(encoding.get())
- } else {
- Encoding::new(encodings::UTF_8)
- },
- force,
- detect_utf16,
- buffer_encoding.clone(),
- )
+ .load_with_encoding(&abs_path, &options, buffer_encoding.clone())
.await?;
let worktree = this.upgrade().context("worktree was dropped")?;
@@ -1391,7 +1377,7 @@ impl LocalWorktree {
},
is_local: true,
is_private,
- encoding: encoding.map(|e| Arc::new(Encoding::new(e.get()))),
+ encoding: Some(encoding),
})
}
};
@@ -3180,16 +3166,25 @@ impl language::LocalFile for File {
fn load(
&self,
cx: &App,
- encoding: Encoding,
- force: bool,
- detect_utf16: bool,
+ options: &EncodingOptions,
buffer_encoding: Option<Arc<Encoding>>,
) -> Task<Result<String>> {
let worktree = self.worktree.read(cx).as_local().unwrap();
let abs_path = worktree.absolutize(&self.path);
let fs = worktree.fs.clone();
+ let options = EncodingOptions {
+ encoding: options.encoding.clone(),
+ force: std::sync::atomic::AtomicBool::new(
+ options.force.load(std::sync::atomic::Ordering::Acquire),
+ ),
+ detect_utf16: std::sync::atomic::AtomicBool::new(
+ options
+ .detect_utf16
+ .load(std::sync::atomic::Ordering::Acquire),
+ ),
+ };
cx.background_spawn(async move {
- fs.load_with_encoding(&abs_path, encoding, force, detect_utf16, buffer_encoding)
+ fs.load_with_encoding(&abs_path, &options, buffer_encoding)
.await
})
}
@@ -469,9 +469,7 @@ async fn test_open_gitignored_files(cx: &mut TestAppContext) {
.update(cx, |tree, cx| {
tree.load_file(
rel_path("one/node_modules/b/b1.js"),
- None,
- false,
- false,
+ &Default::default(),
None,
cx,
)
@@ -516,9 +514,7 @@ async fn test_open_gitignored_files(cx: &mut TestAppContext) {
.update(cx, |tree, cx| {
tree.load_file(
rel_path("one/node_modules/a/a2.js"),
- None,
- false,
- false,
+ &Default::default(),
None,
cx,
)
@@ -1986,7 +1986,7 @@ mod tests {
.worktree_for_root_name("closed_source_worktree", cx)
.unwrap();
worktree2.update(cx, |worktree2, cx| {
- worktree2.load_file(rel_path("main.rs"), None, false, true, None, cx)
+ worktree2.load_file(rel_path("main.rs"), &Default::default(), None, cx)
})
})
.await