Cargo.lock 🔗
@@ -17998,6 +17998,7 @@ name = "title_bar"
version = "0.1.0"
dependencies = [
"anyhow",
+ "arrayvec",
"auto_update",
"call",
"channel",
Created by Lukas Wirth.
Release Notes:
- N/A
Cargo.lock | 1
crates/csv_preview/src/renderer/table_cell.rs | 3
crates/edit_prediction_context/src/edit_prediction_context.rs | 56 ++--
crates/editor/src/semantic_tokens.rs | 45 +--
crates/gpui/src/bounds_tree.rs | 24 +
crates/gpui/src/platform.rs | 2
crates/gpui/src/platform/test/window.rs | 6
crates/gpui/src/scene.rs | 24 +-
crates/gpui/src/text_system/line_layout.rs | 2
crates/gpui/src/window.rs | 6
crates/gpui_macos/src/metal_atlas.rs | 4
crates/gpui_macos/src/metal_renderer.rs | 2
crates/gpui_shared_string/gpui_shared_string.rs | 2
crates/gpui_wgpu/src/wgpu_atlas.rs | 4
crates/gpui_windows/src/directx_atlas.rs | 4
crates/gpui_windows/src/window.rs | 2
crates/multi_buffer/src/anchor.rs | 20 +
crates/multi_buffer/src/multi_buffer.rs | 47 ++-
crates/multi_buffer/src/path_key.rs | 21 -
crates/title_bar/Cargo.toml | 1
crates/title_bar/src/title_bar.rs | 3
21 files changed, 141 insertions(+), 138 deletions(-)
@@ -17998,6 +17998,7 @@ name = "title_bar"
version = "0.1.0"
dependencies = [
"anyhow",
+ "arrayvec",
"auto_update",
"call",
"channel",
@@ -39,13 +39,12 @@ fn create_table_cell(
cx: &Context<'_, CsvPreviewView>,
) -> gpui::Stateful<Div> {
div()
- .id(ElementId::NamedInteger(
+ .id(ElementId::Name(
format!(
"csv-display-cell-{}-{}",
*display_cell_id.row, *display_cell_id.col
)
.into(),
- 0,
))
.cursor_pointer()
.flex()
@@ -66,10 +66,14 @@ struct Identifier {
enum DefinitionTask {
CacheHit(Arc<CacheEntry>),
- CacheMiss {
- definitions: Task<Result<Option<Vec<LocationLink>>>>,
- type_definitions: Task<Result<Option<Vec<LocationLink>>>>,
- },
+ CacheMiss(
+ Task<
+ Option<(
+ Task<Result<Option<Vec<LocationLink>>>>,
+ Task<Result<Option<Vec<LocationLink>>>>,
+ )>,
+ >,
+ ),
}
#[derive(Debug)]
@@ -270,39 +274,37 @@ impl RelatedExcerptStore {
let futures = this.update(cx, |this, cx| {
identifiers_with_distance
.into_iter()
- .filter_map(|(identifier, _)| {
+ .map(|(identifier, _)| {
let task = if let Some(entry) = this.cache.get(&identifier) {
DefinitionTask::CacheHit(entry.clone())
} else {
- let definitions = this
- .project
- .update(cx, |project, cx| {
- project.definitions(&buffer, identifier.range.start, cx)
- })
- .ok()?;
- let type_definitions = this
- .project
- .update(cx, |project, cx| {
- project.type_definitions(&buffer, identifier.range.start, cx)
- })
- .ok()?;
- DefinitionTask::CacheMiss {
- definitions,
- type_definitions,
- }
+ let project = this.project.clone();
+ let buffer = buffer.downgrade();
+ DefinitionTask::CacheMiss(cx.spawn(async move |_, cx| {
+ let buffer = buffer.upgrade()?;
+ let definitions = project
+ .update(cx, |project, cx| {
+ project.definitions(&buffer, identifier.range.start, cx)
+ })
+ .ok()?;
+ let type_definitions = project
+ .update(cx, |project, cx| {
+ project.type_definitions(&buffer, identifier.range.start, cx)
+ })
+ .ok()?;
+ Some((definitions, type_definitions))
+ }))
};
let cx = async_cx.clone();
let project = project.clone();
- Some(async move {
+ async move {
match task {
DefinitionTask::CacheHit(cache_entry) => {
Some((identifier, cache_entry, None))
}
- DefinitionTask::CacheMiss {
- definitions,
- type_definitions,
- } => {
+ DefinitionTask::CacheMiss(task) => {
+ let (definitions, type_definitions) = task.await?;
let (definition_locations, type_definition_locations) =
futures::join!(definitions, type_definitions);
let duration = start_time.elapsed();
@@ -349,7 +351,7 @@ impl RelatedExcerptStore {
}))
}
}
- })
+ }
})
.collect::<Vec<_>>()
})?;
@@ -15,7 +15,7 @@ use project::{
project_settings::ProjectSettings,
};
use settings::{
- SemanticTokenColorOverride, SemanticTokenFontStyle, SemanticTokenFontWeight,
+ SemanticTokenColorOverride, SemanticTokenFontStyle, SemanticTokenFontWeight, SemanticTokenRule,
SemanticTokenRules, Settings as _,
};
use text::BufferId;
@@ -295,13 +295,14 @@ impl Editor {
) else {
continue;
};
+ let theme = cx.theme().syntax();
token_highlights.reserve(2 * server_tokens.len());
token_highlights.extend(buffer_into_editor_highlights(
&server_tokens,
stylizer,
&multi_buffer_snapshot,
&mut interner,
- cx,
+ theme,
));
}
@@ -328,7 +329,7 @@ fn buffer_into_editor_highlights<'a, 'b>(
stylizer: &'a SemanticTokenStylizer,
multi_buffer_snapshot: &'a multi_buffer::MultiBufferSnapshot,
interner: &'b mut HighlightStyleInterner,
- cx: &'a App,
+ theme: &'a SyntaxTheme,
) -> impl Iterator<Item = SemanticTokenHighlight> + use<'a, 'b> {
multi_buffer_snapshot
.text_anchors_to_visible_anchors(
@@ -341,12 +342,7 @@ fn buffer_into_editor_highlights<'a, 'b>(
.zip(buffer_tokens)
.filter_map(|((multi_buffer_start, multi_buffer_end), token)| {
let range = multi_buffer_start?..multi_buffer_end?;
- let style = convert_token(
- stylizer,
- cx.theme().syntax(),
- token.token_type,
- token.token_modifiers,
- )?;
+ let style = convert_token(stylizer, theme, token.token_type, token.token_modifiers)?;
let style = interner.intern(style);
Some(SemanticTokenHighlight {
range,
@@ -365,27 +361,19 @@ fn convert_token(
modifiers: u32,
) -> Option<HighlightStyle> {
let rules = stylizer.rules_for_token(token_type)?;
- let matching: Vec<_> = rules
- .iter()
- .filter(|rule| {
- rule.token_modifiers
- .iter()
- .all(|m| stylizer.has_modifier(modifiers, m))
- })
- .collect();
-
- if let Some(rule) = matching.last() {
- if rule.no_style_defined() {
- return None;
- }
+ let filter = |rule: &&SemanticTokenRule| {
+ rule.token_modifiers
+ .iter()
+ .all(|m| stylizer.has_modifier(modifiers, m))
+ };
+ let last = rules.last()?;
+ if last.no_style_defined() && filter(&last) {
+ return None;
}
let mut highlight = HighlightStyle::default();
- let mut empty = true;
-
- for rule in matching {
- empty = false;
+ for rule in rules.into_iter().filter(filter) {
let style = rule
.style
.iter()
@@ -400,7 +388,7 @@ fn convert_token(
highlight.$highlight_field = rule
.$rule_field
.map($transform)
- .or_else(|| style.and_then(|s| s.$highlight_field))
+ .or_else(|| style.as_ref().and_then(|s| s.$highlight_field))
.or(highlight.$highlight_field)
};
}
@@ -460,8 +448,7 @@ fn convert_token(
},
);
}
-
- if empty { None } else { Some(highlight) }
+ Some(highlight)
}
#[cfg(test)]
@@ -3,6 +3,7 @@ use std::{
cmp,
fmt::Debug,
ops::{Add, Sub},
+ ptr::NonNull,
};
/// Maximum children per internal node (R-tree style branching factor).
@@ -30,7 +31,7 @@ where
/// Reusable stack for tree traversal during insertion.
insert_path: Vec<usize>,
/// Reusable stack for search operations.
- search_stack: Vec<usize>,
+ search_stack: Vec<NonNull<Node<U>>>,
}
/// A node in the bounds tree.
@@ -150,12 +151,14 @@ where
// Slow path: search the tree
self.search_stack.clear();
- self.search_stack.push(root_idx);
+ self.search_stack.push(NonNull::from(&self.nodes[root_idx]));
let mut max_found = 0u32;
- while let Some(node_idx) = self.search_stack.pop() {
- let node = &self.nodes[node_idx];
+ while let Some(node) = self.search_stack.pop() {
+ // SAFETY: `node` is guaranteed to be valid as the `nodes` stack is unmodified in this function
+ // and the `search_stack` only contains pointers from this function call.
+ let node = unsafe { node.as_ref() };
// Pruning: skip if this subtree can't improve our result
if node.max_order <= max_found {
@@ -174,11 +177,14 @@ where
NodeKind::Internal { children } => {
// Children are maintained with highest max_order at the end.
// Push in forward order so the highest (last) is popped first.
- for &child_idx in children.as_slice() {
- if self.nodes[child_idx].max_order > max_found {
- self.search_stack.push(child_idx);
- }
- }
+ self.search_stack.extend(
+ children
+ .as_slice()
+ .iter()
+ .map(|&child_idx| &self.nodes[child_idx])
+ .filter(|node| node.max_order > max_found)
+ .map(NonNull::from),
+ );
}
}
}
@@ -1048,7 +1048,7 @@ impl<T> AtlasTextureList<T> {
}
}
-#[derive(Clone, Debug, PartialEq, Eq)]
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[repr(C)]
#[expect(missing_docs)]
pub struct AtlasTile {
@@ -353,8 +353,8 @@ impl PlatformAtlas for TestAtlas {
>,
) -> anyhow::Result<Option<crate::AtlasTile>> {
let mut state = self.0.lock();
- if let Some(tile) = state.tiles.get(key) {
- return Ok(Some(tile.clone()));
+ if let Some(&tile) = state.tiles.get(key) {
+ return Ok(Some(tile));
}
drop(state);
@@ -384,7 +384,7 @@ impl PlatformAtlas for TestAtlas {
},
);
- Ok(Some(state.tiles[key].clone()))
+ Ok(Some(state.tiles[key]))
}
fn remove(&self, key: &AtlasKey) {
@@ -88,11 +88,11 @@ impl Scene {
match &mut primitive {
Primitive::Shadow(shadow) => {
shadow.order = order;
- self.shadows.push(shadow.clone());
+ self.shadows.push(*shadow);
}
Primitive::Quad(quad) => {
quad.order = order;
- self.quads.push(quad.clone());
+ self.quads.push(*quad);
}
Primitive::Path(path) => {
path.order = order;
@@ -101,19 +101,19 @@ impl Scene {
}
Primitive::Underline(underline) => {
underline.order = order;
- self.underlines.push(underline.clone());
+ self.underlines.push(*underline);
}
Primitive::MonochromeSprite(sprite) => {
sprite.order = order;
- self.monochrome_sprites.push(sprite.clone());
+ self.monochrome_sprites.push(*sprite);
}
Primitive::SubpixelSprite(sprite) => {
sprite.order = order;
- self.subpixel_sprites.push(sprite.clone());
+ self.subpixel_sprites.push(*sprite);
}
Primitive::PolychromeSprite(sprite) => {
sprite.order = order;
- self.polychrome_sprites.push(sprite.clone());
+ self.polychrome_sprites.push(*sprite);
}
Primitive::Surface(surface) => {
surface.order = order;
@@ -481,7 +481,7 @@ pub enum PrimitiveBatch {
Surfaces(Range<usize>),
}
-#[derive(Default, Debug, Clone)]
+#[derive(Default, Debug, Copy, Clone)]
#[repr(C)]
#[expect(missing_docs)]
pub struct Quad {
@@ -501,7 +501,7 @@ impl From<Quad> for Primitive {
}
}
-#[derive(Debug, Clone)]
+#[derive(Debug, Copy, Clone)]
#[repr(C)]
#[expect(missing_docs)]
pub struct Underline {
@@ -520,7 +520,7 @@ impl From<Underline> for Primitive {
}
}
-#[derive(Debug, Clone)]
+#[derive(Debug, Copy, Clone)]
#[repr(C)]
#[expect(missing_docs)]
pub struct Shadow {
@@ -652,7 +652,7 @@ impl Default for TransformationMatrix {
}
}
-#[derive(Clone, Debug)]
+#[derive(Copy, Clone, Debug)]
#[repr(C)]
#[expect(missing_docs)]
pub struct MonochromeSprite {
@@ -671,7 +671,7 @@ impl From<MonochromeSprite> for Primitive {
}
}
-#[derive(Clone, Debug)]
+#[derive(Copy, Clone, Debug)]
#[repr(C)]
#[expect(missing_docs)]
pub struct SubpixelSprite {
@@ -690,7 +690,7 @@ impl From<SubpixelSprite> for Primitive {
}
}
-#[derive(Clone, Debug)]
+#[derive(Copy, Clone, Debug)]
#[repr(C)]
#[expect(missing_docs)]
pub struct PolychromeSprite {
@@ -186,7 +186,7 @@ impl LineLayout {
if width > wrap_width && boundary > last_boundary {
// When used line_clamp, we should limit the number of lines.
if let Some(max_lines) = max_lines
- && boundaries.len() >= max_lines - 1
+ && boundaries.len() >= max_lines.saturating_sub(1)
{
break;
}
@@ -1627,7 +1627,7 @@ pub struct DispatchEventResult {
/// Indicates which region of the window is visible. Content falling outside of this mask will not be
/// rendered. Currently, only rectangular content masks are supported, but we give the mask its own type
/// to leave room to support more complex shapes in the future.
-#[derive(Clone, Debug, Default, PartialEq, Eq)]
+#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
#[repr(C)]
pub struct ContentMask<P: Clone + Debug + Default + PartialEq> {
/// The bounds
@@ -2713,7 +2713,7 @@ impl Window {
.set_active_node(deferred_draw.parent_node);
let paint_start = self.paint_index();
- let content_mask = deferred_draw.content_mask.clone();
+ let content_mask = deferred_draw.content_mask;
if let Some(element) = deferred_draw.element.as_mut() {
self.with_rendered_view(deferred_draw.current_view, |window| {
window.with_content_mask(content_mask, |window| {
@@ -2790,7 +2790,7 @@ impl Window {
parent_node: reused_subtree.refresh_node_id(deferred_draw.parent_node),
element_id_stack: deferred_draw.element_id_stack.clone(),
text_style_stack: deferred_draw.text_style_stack.clone(),
- content_mask: deferred_draw.content_mask.clone(),
+ content_mask: deferred_draw.content_mask,
rem_size: deferred_draw.rem_size,
priority: deferred_draw.priority,
element: None,
@@ -44,7 +44,7 @@ impl PlatformAtlas for MetalAtlas {
) -> Result<Option<AtlasTile>> {
let mut lock = self.0.lock();
if let Some(tile) = lock.tiles_by_key.get(key) {
- Ok(Some(tile.clone()))
+ Ok(Some(*tile))
} else {
let Some((size, bytes)) = build()? else {
return Ok(None);
@@ -54,7 +54,7 @@ impl PlatformAtlas for MetalAtlas {
.context("failed to allocate")?;
let texture = lock.texture(tile.texture_id);
texture.upload(tile.bounds, &bytes);
- lock.tiles_by_key.insert(key.clone(), tile.clone());
+ lock.tiles_by_key.insert(key.clone(), tile);
Ok(Some(tile))
}
}
@@ -1469,7 +1469,7 @@ impl MetalRenderer {
buffer_contents,
SurfaceBounds {
bounds: surface.bounds,
- content_mask: surface.content_mask.clone(),
+ content_mask: surface.content_mask,
},
);
}
@@ -46,7 +46,7 @@ impl JsonSchema for SharedString {
impl Default for SharedString {
fn default() -> Self {
- Self(ArcCow::Owned(Arc::default()))
+ Self(ArcCow::Borrowed(""))
}
}
@@ -103,7 +103,7 @@ impl PlatformAtlas for WgpuAtlas {
) -> Result<Option<AtlasTile>> {
let mut lock = self.0.lock();
if let Some(tile) = lock.tiles_by_key.get(key) {
- Ok(Some(tile.clone()))
+ Ok(Some(*tile))
} else {
profiling::scope!("new tile");
let Some((size, bytes)) = build()? else {
@@ -113,7 +113,7 @@ impl PlatformAtlas for WgpuAtlas {
.allocate(size, key.texture_kind())
.context("failed to allocate")?;
lock.upload_texture(tile.texture_id, tile.bounds, &bytes);
- lock.tiles_by_key.insert(key.clone(), tile.clone());
+ lock.tiles_by_key.insert(key.clone(), tile);
Ok(Some(tile))
}
}
@@ -80,7 +80,7 @@ impl PlatformAtlas for DirectXAtlas {
) -> anyhow::Result<Option<AtlasTile>> {
let mut lock = self.0.lock();
if let Some(tile) = lock.tiles_by_key.get(key) {
- Ok(Some(tile.clone()))
+ Ok(Some(*tile))
} else {
let Some((size, bytes)) = build()? else {
return Ok(None);
@@ -90,7 +90,7 @@ impl PlatformAtlas for DirectXAtlas {
.ok_or_else(|| anyhow::anyhow!("failed to allocate"))?;
let texture = lock.texture(tile.texture_id);
texture.upload(&lock.device_context, tile.bounds, &bytes);
- lock.tiles_by_key.insert(key.clone(), tile.clone());
+ lock.tiles_by_key.insert(key.clone(), tile);
Ok(Some(tile))
}
}
@@ -174,7 +174,7 @@ impl WindowsWindowState {
}
pub(crate) fn is_maximized(&self) -> bool {
- !self.is_fullscreen() && unsafe { IsZoomed(self.hwnd) }.as_bool()
+ !self.is_fullscreen()
}
fn bounds(&self) -> Bounds<Pixels> {
@@ -103,15 +103,16 @@ impl ExcerptAnchor {
}
pub(crate) fn cmp(&self, other: &Self, snapshot: &MultiBufferSnapshot) -> Ordering {
- let Some(self_path_key) = snapshot.path_keys_by_index.get(&self.path) else {
+ let Some(self_path_key) = snapshot.path_keys.get_index(self.path.0 as usize) else {
panic!("anchor's path was never added to multibuffer")
};
- let Some(other_path_key) = snapshot.path_keys_by_index.get(&other.path) else {
+ let Some(other_path_key) = snapshot.path_keys.get_index(other.path.0 as usize) else {
panic!("anchor's path was never added to multibuffer")
};
- if self_path_key.cmp(other_path_key) != Ordering::Equal {
- return self_path_key.cmp(other_path_key);
+ match self_path_key.cmp(other_path_key) {
+ Ordering::Equal => (),
+ ordering => return ordering,
}
// in the case that you removed the buffer containing self,
@@ -122,16 +123,19 @@ impl ExcerptAnchor {
}
// two anchors into the same buffer at the same path
- // TODO(cole) buffer_for_path is slow
let Some(buffer) = snapshot
- .buffer_for_path(&self_path_key)
- .filter(|buffer| buffer.remote_id() == self.text_anchor.buffer_id)
+ .buffers
+ .get(&self.text_anchor.buffer_id)
+ .filter(|buffer_state| buffer_state.path_key == *self_path_key)
else {
// buffer no longer exists at the original path (which may have been reused for a different buffer),
// so no way to compare the anchors
return Ordering::Equal;
};
- let text_cmp = self.text_anchor().cmp(&other.text_anchor(), buffer);
+ // two anchors into the same buffer at the same path that still exists at that path in the multibuffer
+ let text_cmp = self
+ .text_anchor()
+ .cmp(&other.text_anchor(), &buffer.buffer_snapshot);
if text_cmp != Ordering::Equal {
return text_cmp;
}
@@ -15,7 +15,7 @@ use buffer_diff::{
DiffHunkStatus, DiffHunkStatusKind,
};
use clock::ReplicaId;
-use collections::{BTreeMap, Bound, HashMap, HashSet};
+use collections::{BTreeMap, Bound, HashMap, HashSet, IndexSet};
use gpui::{App, Context, Entity, EventEmitter};
use itertools::Itertools;
use language::{
@@ -676,7 +676,7 @@ impl DiffState {
#[derive(Clone)]
struct BufferStateSnapshot {
- path_key: PathKey,
+ pub(crate) path_key: PathKey,
path_key_index: PathKeyIndex,
buffer_snapshot: BufferSnapshot,
}
@@ -695,8 +695,7 @@ impl fmt::Debug for BufferStateSnapshot {
pub struct MultiBufferSnapshot {
excerpts: SumTree<Excerpt>,
buffers: TreeMap<BufferId, BufferStateSnapshot>,
- path_keys_by_index: TreeMap<PathKeyIndex, PathKey>,
- indices_by_path_key: TreeMap<PathKey, PathKeyIndex>,
+ path_keys: Arc<IndexSet<PathKey>>,
diffs: SumTree<DiffStateSnapshot>,
diff_transforms: SumTree<DiffTransform>,
non_text_state_update_count: usize,
@@ -1802,8 +1801,7 @@ impl MultiBuffer {
show_deleted_hunks: _,
use_extended_diff_range: _,
show_headers: _,
- path_keys_by_index: _,
- indices_by_path_key: _,
+ path_keys: _,
buffers,
} = self.snapshot.get_mut();
let start = ExcerptDimension(MultiBufferOffset::ZERO);
@@ -2497,8 +2495,7 @@ impl MultiBuffer {
excerpts,
diffs: buffer_diff,
buffers: buffer_snapshots,
- path_keys_by_index: _,
- indices_by_path_key: _,
+ path_keys: _,
diff_transforms: _,
non_text_state_update_count,
edit_count,
@@ -3582,7 +3579,7 @@ impl MultiBufferSnapshot {
let Some(excerpt) = cursor.item() else {
break;
};
- if &excerpt.path_key != path {
+ if excerpt.path_key != *path {
break;
}
let buffer_snapshot = excerpt.buffer_snapshot(self);
@@ -6356,13 +6353,6 @@ impl MultiBufferSnapshot {
))
}
- pub fn buffer_for_path(&self, path: &PathKey) -> Option<&BufferSnapshot> {
- let (_, _, excerpt) = self
- .excerpts
- .find::<ExcerptSummary, _>((), path, Bias::Left);
- Some(excerpt?.buffer_snapshot(self))
- }
-
pub fn path_for_buffer(&self, buffer_id: BufferId) -> Option<&PathKey> {
Some(&self.buffers.get(&buffer_id)?.path_key)
}
@@ -6378,9 +6368,7 @@ impl MultiBufferSnapshot {
}
fn first_excerpt_for_path(&self, path_key: &PathKey) -> Option<&Excerpt> {
- let (_, _, first_excerpt) =
- self.excerpts
- .find::<ExcerptSummary, _>((), path_key, Bias::Left);
+ let (_, _, first_excerpt) = self.excerpts.find::<PathKey, _>((), path_key, Bias::Left);
first_excerpt
}
@@ -6389,7 +6377,7 @@ impl MultiBufferSnapshot {
}
fn try_path_for_anchor(&self, anchor: ExcerptAnchor) -> Option<&PathKey> {
- self.path_keys_by_index.get(&anchor.path)
+ self.path_keys.get_index(anchor.path.0 as usize)
}
pub fn path_for_anchor(&self, anchor: ExcerptAnchor) -> &PathKey {
@@ -6831,7 +6819,7 @@ impl MultiBufferSnapshot {
excerpt.path_key
);
assert_eq!(
- self.path_keys_by_index.get(&excerpt.path_key_index),
+ self.path_keys.get_index(excerpt.path_key_index.0 as usize),
Some(&excerpt.path_key),
"excerpt path key index does not match path key: {:#?}",
excerpt.path_key,
@@ -7470,6 +7458,23 @@ impl sum_tree::SeekTarget<'_, ExcerptSummary, ExcerptSummary> for AnchorSeekTarg
}
}
+impl sum_tree::ContextLessSummary for PathKey {
+ fn zero() -> Self {
+ PathKey::min()
+ }
+
+ fn add_summary(&mut self, summary: &Self) {
+ debug_assert!(
+ summary >= self,
+ "Path keys must be in ascending order: {:?} > {:?}",
+ summary,
+ self
+ );
+
+ *self = summary.clone();
+ }
+}
+
impl sum_tree::SeekTarget<'_, ExcerptSummary, ExcerptSummary> for PathKey {
fn cmp(
&self,
@@ -253,8 +253,8 @@ impl MultiBuffer {
for (path_index, excerpt_anchors) in &buffers {
let path = snapshot
- .path_keys_by_index
- .get(&path_index)
+ .path_keys
+ .get_index(path_index.0 as usize)
.expect("anchor from wrong multibuffer");
let mut excerpt_anchors = excerpt_anchors.peekable();
@@ -353,18 +353,15 @@ impl MultiBuffer {
pub(crate) fn get_or_create_path_key_index(&mut self, path_key: &PathKey) -> PathKeyIndex {
let mut snapshot = self.snapshot.borrow_mut();
- if let Some(&existing) = snapshot.indices_by_path_key.get(path_key) {
- return existing;
+ if let Some(existing) = snapshot.path_keys.get_index_of(path_key) {
+ return PathKeyIndex(existing as u64);
}
- let index = snapshot
- .path_keys_by_index
- .last()
- .map(|(index, _)| PathKeyIndex(index.0 + 1))
- .unwrap_or(PathKeyIndex(0));
- snapshot.path_keys_by_index.insert(index, path_key.clone());
- snapshot.indices_by_path_key.insert(path_key.clone(), index);
- index
+ PathKeyIndex(
+ Arc::make_mut(&mut snapshot.path_keys)
+ .insert_full(path_key.clone())
+ .0 as u64,
+ )
}
pub fn update_path_excerpts(
@@ -59,6 +59,7 @@ ui.workspace = true
util.workspace = true
workspace.workspace = true
zed_actions.workspace = true
+arrayvec = "0.7.6"
[target.'cfg(windows)'.dependencies]
windows.workspace = true
@@ -7,6 +7,7 @@ mod update_version;
use crate::application_menu::{ApplicationMenu, show_menus};
use crate::plan_chip::PlanChip;
+use arrayvec::ArrayVec;
use git_ui::worktree_picker::WorktreePicker;
pub use platform_title_bar::{
self, DraggedWindowTab, MergeAllWindows, MoveTabToNewWindow, PlatformTitleBar,
@@ -177,7 +178,7 @@ impl Render for TitleBar {
let show_menus = show_menus(cx);
- let mut children = Vec::new();
+ let mut children = <ArrayVec<_, 4>>::new();
let mut project_name = None;
let mut repository = None;