Detailed changes
This change adds C support: the tree-sitter-c grammar, a C language config with highlight/indent/outline queries, and a clangd-based language server integration.

Cargo.lock:
@@ -5275,6 +5275,16 @@ dependencies = [
"regex",
]
+[[package]]
+name = "tree-sitter-c"
+version = "0.20.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7bdc5574c6cbc39c409246caeb1dd4d3c4bd6d30d4e9b399776086c20365fd24"
+dependencies = [
+ "cc",
+ "tree-sitter",
+]
+
[[package]]
name = "tree-sitter-markdown"
version = "0.0.1"
@@ -5884,6 +5894,7 @@ dependencies = [
"tiny_http",
"toml",
"tree-sitter",
+ "tree-sitter-c",
"tree-sitter-markdown",
"tree-sitter-rust",
"unindent",

Cargo.toml:
@@ -92,6 +92,7 @@ time = "0.3"
tiny_http = "0.8"
toml = "0.5"
tree-sitter = "0.20.4"
+tree-sitter-c = "0.20.1"
tree-sitter-rust = "0.20.1"
tree-sitter-markdown = { git = "https://github.com/MDeiml/tree-sitter-markdown", rev = "330ecab87a3e3a7211ac69bbadc19eabecdb1cca" }
url = "2.2"

c/config.toml (new file):
@@ -0,0 +1,13 @@
+name = "C"
+path_suffixes = ["c", "h"]
+line_comment = "// "
+brackets = [
+ { start = "{", end = "}", close = true, newline = true },
+ { start = "[", end = "]", close = true, newline = true },
+ { start = "(", end = ")", close = true, newline = true },
+ { start = "\"", end = "\"", close = true, newline = false },
+ { start = "/*", end = " */", close = true, newline = false },
+]
+
+[language_server]
+disk_based_diagnostic_sources = []
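
For context, path_suffixes maps .c and .h buffers to this language, line_comment drives comment toggling, and the brackets table controls auto-closing (newline = true presumably also inserts a newline between the pair on Enter). A hypothetical snippet, not part of the change, showing the conventions the config encodes:

    // line comment, matching line_comment = "// "
    static int add(int a, int b) {   // "(", "{", and quotes auto-close per the brackets table
        return a + b;
    }
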
c/highlights.scm (new file):
@@ -0,0 +1,101 @@
+[
+ "break"
+ "case"
+ "const"
+ "continue"
+ "default"
+ "do"
+ "else"
+ "enum"
+ "extern"
+ "for"
+ "if"
+ "inline"
+ "return"
+ "sizeof"
+ "static"
+ "struct"
+ "switch"
+ "typedef"
+ "union"
+ "volatile"
+ "while"
+] @keyword
+
+[
+ "#define"
+ "#elif"
+ "#else"
+ "#endif"
+ "#if"
+ "#ifdef"
+ "#ifndef"
+ "#include"
+ (preproc_directive)
+] @keyword
+
+[
+ "--"
+ "-"
+ "-="
+ "->"
+ "="
+ "!="
+ "*"
+ "&"
+ "&&"
+ "+"
+ "++"
+ "+="
+ "<"
+ "=="
+ ">"
+ "||"
+] @operator
+
+[
+ "."
+ ";"
+] @delimiter
+
+[
+ (string_literal)
+ (system_lib_string)
+ (char_literal)
+] @string
+
+(comment) @comment
+
+(null) @constant
+(number_literal) @number
+
+[
+ (true)
+ (false)
+ (null)
+] @constant
+
+(identifier) @variable
+
+((identifier) @constant
+ (#match? @constant "^[A-Z][A-Z\\d_]*$"))
+
+(call_expression
+ function: (identifier) @function)
+(call_expression
+ function: (field_expression
+ field: (field_identifier) @function))
+(function_declarator
+ declarator: (identifier) @function)
+(preproc_function_def
+ name: (identifier) @function.special)
+
+(field_identifier) @property
+(statement_identifier) @label
+
+[
+ (type_identifier)
+ (primitive_type)
+ (sized_type_specifier)
+] @type
+
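
As a rough sketch of how these captures land on C source, here is a hypothetical snippet, not part of the change, with the expected capture for each token in trailing comments (exact results depend on tree-sitter-c's node names and on capture precedence):

    #include <stdio.h>            // "#include" -> @keyword, <stdio.h> -> @string (system_lib_string)
    #define BUF_SIZE 64           // "#define" -> @keyword, BUF_SIZE -> @constant (all-caps identifier)

    struct point { int x, y; };   // "struct" -> @keyword, point -> @type, x and y -> @property

    static int length(const char *s) {   // "static"/"const" -> @keyword, int/char -> @type, length -> @function
        int n = 0;                        // n -> @variable, 0 -> @number
        while (*s++) {                    // "while" -> @keyword, "*" and "++" -> @operator
            n++;
        }
        return n;                         // "return" -> @keyword, ";" -> @delimiter
    }
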
c/indents.scm (new file):
@@ -0,0 +1,7 @@
+[
+ (field_expression)
+ (assignment_expression)
+] @indent
+
+(_ "{" "}" @end) @indent
+(_ "(" ")" @end) @indent
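
Roughly, the two anonymous-node patterns indent everything between an opening "{" or "(" and the matching @end token, while field and assignment expressions indent their wrapped continuations. A hypothetical example, not part of the change:

    struct config { int retries; };

    static void init(struct config *cfg, int n) {
        cfg->retries =
            n;        // field_expression / assignment_expression carry @indent, so the wrapped RHS indents
    }                 // the (_ "{" "}" @end) pattern indents the body and dedents the closing brace
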
c/outline.scm (new file):
@@ -0,0 +1,30 @@
+(preproc_def
+ "#define" @context
+ name: (_) @name) @item
+
+(preproc_function_def
+ "#define" @context
+ name: (_) @name
+ parameters: (preproc_params
+ "(" @context
+ ")" @context)) @item
+
+(type_definition
+ "typedef" @context
+ declarator: (_) @name) @item
+
+(declaration
+ type: (_) @context
+ declarator: (function_declarator
+ declarator: (_) @name
+ parameters: (parameter_list
+ "(" @context
+ ")" @context))) @item
+
+(function_definition
+ type: (_) @context
+ declarator: (function_declarator
+ declarator: (_) @name
+ parameters: (parameter_list
+ "(" @context
+ ")" @context))) @item
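
Roughly, these patterns give the outline one entry per macro, typedef, function declaration, and function definition: @name is the searchable symbol and the @context captures supply the surrounding text shown next to it. For a hypothetical file like the one below (not part of the change), the outline would list BUF_SIZE, MIN, byte_t, checksum, and main:

    #define BUF_SIZE 64                           // preproc_def: "#define" @context, BUF_SIZE @name
    #define MIN(a, b) ((a) < (b) ? (a) : (b))     // preproc_function_def: MIN @name, parens @context
    typedef unsigned char byte_t;                 // type_definition: "typedef" @context, byte_t @name
    int checksum(const byte_t *data, int len);    // declaration: int @context, checksum @name
    int main(void) {                              // function_definition: int @context, main @name
        return 0;
    }
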
Language registry module (Rust):
@@ -1,4 +1,4 @@
-use anyhow::{anyhow, Result};
+use anyhow::{anyhow, Context, Result};
use async_compression::futures::bufread::GzipDecoder;
use client::http::{self, HttpClient, Method};
use futures::{future::BoxFuture, FutureExt, StreamExt};
@@ -22,6 +22,7 @@ use util::{ResultExt, TryFutureExt};
struct LanguageDir;
struct RustLsp;
+struct CLsp;
#[derive(Deserialize)]
struct GithubRelease {
@@ -291,6 +292,135 @@ impl LspExt for RustLsp {
}
}
+impl LspExt for CLsp {
+ fn fetch_latest_server_version(
+ &self,
+ http: Arc<dyn HttpClient>,
+ ) -> BoxFuture<'static, Result<LspBinaryVersion>> {
+ async move {
+ let release = http
+ .send(
+ surf::RequestBuilder::new(
+ Method::Get,
+ http::Url::parse(
+ "https://api.github.com/repos/clangd/clangd/releases/latest",
+ )
+ .unwrap(),
+ )
+ .middleware(surf::middleware::Redirect::default())
+ .build(),
+ )
+ .await
+ .map_err(|err| anyhow!("error fetching latest release: {}", err))?
+ .body_json::<GithubRelease>()
+ .await
+ .map_err(|err| anyhow!("error parsing latest release: {}", err))?;
+ let asset_name = format!("clangd-mac-{}.zip", release.name);
+ let asset = release
+ .assets
+ .iter()
+ .find(|asset| asset.name == asset_name)
+ .ok_or_else(|| anyhow!("no release found matching {:?}", asset_name))?;
+ Ok(LspBinaryVersion {
+ name: release.name,
+ url: asset.browser_download_url.clone(),
+ })
+ }
+ .boxed()
+ }
+
+ fn fetch_server_binary(
+ &self,
+ version: LspBinaryVersion,
+ http: Arc<dyn HttpClient>,
+ download_dir: Arc<Path>,
+ ) -> BoxFuture<'static, Result<PathBuf>> {
+ async move {
+ let container_dir = download_dir.join("clangd");
+ fs::create_dir_all(&container_dir)
+ .await
+ .context("failed to create container directory")?;
+
+ let zip_path = container_dir.join(format!("clangd_{}.zip", version.name));
+ let version_dir = container_dir.join(format!("clangd_{}", version.name));
+ let binary_path = version_dir.join("bin/clangd");
+
+ if fs::metadata(&binary_path).await.is_err() {
+ let response = http
+ .send(
+ surf::RequestBuilder::new(Method::Get, version.url)
+ .middleware(surf::middleware::Redirect::default())
+ .build(),
+ )
+ .await
+ .map_err(|err| anyhow!("error downloading release: {}", err))?;
+ let mut file = File::create(&zip_path).await?;
+ if !response.status().is_success() {
+ Err(anyhow!(
+ "download failed with status {}",
+ response.status().to_string()
+ ))?;
+ }
+ futures::io::copy(response, &mut file).await?;
+
+ let unzip_status = smol::process::Command::new("unzip")
+ .current_dir(&container_dir)
+ .arg(&zip_path)
+ .output()
+ .await?
+ .status;
+ if !unzip_status.success() {
+ Err(anyhow!("failed to unzip clangd archive"))?;
+ }
+
+ if let Some(mut entries) = fs::read_dir(&container_dir).await.log_err() {
+ while let Some(entry) = entries.next().await {
+ if let Some(entry) = entry.log_err() {
+ let entry_path = entry.path();
+ if entry_path.as_path() != version_dir {
+ fs::remove_dir_all(&entry_path).await.log_err();
+ }
+ }
+ }
+ }
+ }
+
+ Ok(binary_path)
+ }
+ .boxed()
+ }
+
+ fn cached_server_binary(&self, download_dir: Arc<Path>) -> BoxFuture<'static, Option<PathBuf>> {
+ async move {
+ let destination_dir_path = download_dir.join("clangd");
+ fs::create_dir_all(&destination_dir_path).await?;
+
+ let mut last_clangd_dir = None;
+ let mut entries = fs::read_dir(&destination_dir_path).await?;
+ while let Some(entry) = entries.next().await {
+ let entry = entry?;
+ if entry.file_type().await?.is_dir() {
+ last_clangd_dir = Some(entry.path());
+ }
+ }
+ let clangd_dir = last_clangd_dir.ok_or_else(|| anyhow!("no cached binary"))?;
+ let clangd_bin = clangd_dir.join("bin/clangd");
+ if clangd_bin.exists() {
+ Ok(clangd_bin)
+ } else {
+ Err(anyhow!(
+ "missing clangd binary in directory {:?}",
+ clangd_dir
+ ))
+ }
+ }
+ .log_err()
+ .boxed()
+ }
+
+ fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {}
+}
+
pub fn build_language_registry() -> LanguageRegistry {
let mut languages = LanguageRegistry::new();
languages.set_language_server_download_dir(
@@ -298,6 +428,7 @@ pub fn build_language_registry() -> LanguageRegistry {
.expect("failed to determine home directory")
.join(".zed"),
);
+ languages.add(Arc::new(c()));
languages.add(Arc::new(rust()));
languages.add(Arc::new(markdown()));
languages
@@ -318,6 +449,19 @@ fn rust() -> Language {
.with_lsp_ext(RustLsp)
}
+fn c() -> Language {
+ let grammar = tree_sitter_c::language();
+ let config = toml::from_slice(&LanguageDir::get("c/config.toml").unwrap().data).unwrap();
+ Language::new(config, Some(grammar))
+ .with_highlights_query(load_query("c/highlights.scm").as_ref())
+ .unwrap()
+ .with_indents_query(load_query("c/indents.scm").as_ref())
+ .unwrap()
+ .with_outline_query(load_query("c/outline.scm").as_ref())
+ .unwrap()
+ .with_lsp_ext(CLsp)
+}
+
fn markdown() -> Language {
let grammar = tree_sitter_markdown::language();
let config = toml::from_slice(&LanguageDir::get("markdown/config.toml").unwrap().data).unwrap();