diff --git a/src/chumsky.rs b/src/chumsky.rs
index 25a4d41..45fe32f 100644
--- a/src/chumsky.rs
+++ b/src/chumsky.rs
@@ -33,7 +33,7 @@ pub enum Token {
     Else,
 }
 
-pub type Ast = HashMap<String, Func>;
+pub type Ast = Vec<Spanned<Func>>;
 
 impl fmt::Display for Token {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
@@ -404,7 +404,7 @@ fn expr_parser() -> impl Parser<Token, Spanned<Expr>, Error = Simple<Token>> + C
     })
 }
 
-pub fn funcs_parser() -> impl Parser<Token, HashMap<String, Func>, Error = Simple<Token>> + Clone {
+pub fn funcs_parser() -> impl Parser<Token, Vec<Spanned<Func>>, Error = Simple<Token>> + Clone {
     let ident = filter_map(|span, tok| match tok {
         Token::Ident(ident) => Ok(ident),
         _ => Err(Simple::expected_input_found(span, Vec::new(), Some(tok))),
@@ -454,16 +454,10 @@ pub fn funcs_parser() -> impl Parser<Token, HashMap<String, Func>, Error = Simpl
 
     func.repeated()
         .try_map(|fs, _| {
-            let mut funcs = HashMap::new();
-            for ((name, name_span), f) in fs {
-                if funcs.insert(name.clone(), f).is_some() {
-                    return Err(Simple::custom(
-                        name_span,
-                        format!("Function '{}' already exists", name),
-                    ));
-                }
-            }
-            Ok(funcs)
+            Ok(fs
+                .into_iter()
+                .map(|item| (item.1, item.0 .1))
+                .collect::<Vec<_>>())
         })
         .then_ignore(end())
 }
@@ -500,7 +494,7 @@ pub fn type_inference(expr: &Spanned<Expr>, symbol_type_table: &mut HashMap<Span
 
 #[derive(Debug)]
 pub struct ParserResult {
-    pub ast: Option<HashMap<String, Func>>,
+    pub ast: Option<Vec<Spanned<Func>>>,
     pub parse_errors: Vec<Simple<String>>,
     pub semantic_tokens: Vec<ImCompleteSemanticToken>,
 }
diff --git a/src/completion.rs b/src/completion.rs
index 232ab2a..0717c68 100644
--- a/src/completion.rs
+++ b/src/completion.rs
@@ -7,33 +7,37 @@ pub enum ImCompleteCompletionItem {
 }
 /// return (need_to_continue_search, founded reference)
 pub fn completion(
-    ast: &HashMap<String, Func>,
+    ast: &[Spanned<Func>],
     ident_offset: usize,
 ) -> HashMap<String, ImCompleteCompletionItem> {
     let mut map = HashMap::new();
-    for (_, v) in ast.iter() {
-        if v.name.1.end < ident_offset {
+    for (func, _) in ast.iter() {
+        if func.name.1.end < ident_offset {
             map.insert(
-                v.name.0.clone(),
+                func.name.0.clone(),
                 ImCompleteCompletionItem::Function(
-                    v.name.0.clone(),
-                    v.args.clone().into_iter().map(|(name, _)| name).collect(),
+                    func.name.0.clone(),
+                    func.args
+                        .clone()
+                        .into_iter()
+                        .map(|(name, _)| name)
+                        .collect(),
                 ),
             );
         }
     }
 
     // collect params variable
-    for (_, v) in ast.iter() {
-        if v.span.end > ident_offset && v.span.start < ident_offset {
+    for (func, _) in ast.iter() {
+        if func.span.end > ident_offset && func.span.start < ident_offset {
             // log::debug!("this is completion from body {}", name);
-            v.args.iter().for_each(|(item, _)| {
+            func.args.iter().for_each(|(item, _)| {
                 map.insert(
                     item.clone(),
                     ImCompleteCompletionItem::Variable(item.clone()),
                 );
             });
-            get_completion_of(&v.body, &mut map, ident_offset);
+            get_completion_of(&func.body, &mut map, ident_offset);
         }
     }
     map
diff --git a/src/lib.rs b/src/lib.rs
index 0595762..92dc3d7 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,7 +1,5 @@
 pub mod chumsky;
 pub mod completion;
-pub mod jump_definition;
-pub mod reference;
 pub mod semantic_analyze;
 pub mod semantic_token;
 pub mod span;
diff --git a/src/main.rs b/src/main.rs
index 3d500bd..2e9d893 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -6,9 +6,8 @@ use nrs_language_server::chumsky::{
     parse, type_inference, Ast, ImCompleteSemanticToken, ParserResult,
 };
 use nrs_language_server::completion::completion;
-use nrs_language_server::reference::get_reference;
 use nrs_language_server::semantic_analyze::{analyze_program, IdentType, Semantic};
-use nrs_language_server::semantic_token::{semantic_token_from_ast, LEGEND_TYPE};
+use nrs_language_server::semantic_token::LEGEND_TYPE;
 use nrs_language_server::span::Span;
 use ropey::Rope;
 use serde::{Deserialize, Serialize};
@@ -34,8 +33,15 @@ impl LanguageServer for Backend {
             offset_encoding: None,
             capabilities: ServerCapabilities {
                 inlay_hint_provider: Some(OneOf::Left(true)),
-                text_document_sync: Some(TextDocumentSyncCapability::Kind(
-                    TextDocumentSyncKind::FULL,
+                text_document_sync: Some(TextDocumentSyncCapability::Options(
+                    TextDocumentSyncOptions {
+                        open_close: Some(true),
+                        change: Some(TextDocumentSyncKind::FULL),
+                        save: Some(TextDocumentSyncSaveOptions::SaveOptions(SaveOptions {
+                            include_text: Some(true),
+                        })),
+                        ..Default::default()
+                    },
                 )),
                 completion_provider: Some(CompletionOptions {
                     resolve_provider: Some(false),
@@ -102,7 +108,7 @@ impl LanguageServer for Backend {
         self.on_change(TextDocumentItem {
             uri: params.text_document.uri,
             text: &params.text_document.text,
-            version: params.text_document.version,
+            version: Some(params.text_document.version),
         })
         .await
     }
@@ -111,12 +117,22 @@ impl LanguageServer for Backend {
         self.on_change(TextDocumentItem {
             text: &params.content_changes[0].text,
             uri: params.text_document.uri,
-            version: params.text_document.version,
+            version: Some(params.text_document.version),
         })
         .await
     }
-    async fn did_save(&self, _: DidSaveTextDocumentParams) {
+    async fn did_save(&self, params: DidSaveTextDocumentParams) {
+        dbg!(&params.text);
+        if let Some(text) = params.text {
+            let item = TextDocumentItem {
+                uri: params.text_document.uri,
+                text: &text,
+                version: None,
+            };
+            self.on_change(item).await;
+            _ = self.client.semantic_tokens_refresh().await;
+        }
         debug!("file saved!");
     }
 
     async fn did_close(&self, _: DidCloseTextDocumentParams) {
@@ -127,7 +143,7 @@ impl LanguageServer for Backend {
         &self,
         params: GotoDefinitionParams,
     ) -> Result<Option<GotoDefinitionResponse>> {
-        let definition = async {
+        let definition = || -> Option<GotoDefinitionResponse> {
             let uri = params.text_document_position_params.text_document.uri;
             let semantic = self.semantic_map.get(uri.as_str())?;
             let rope = self.document_map.get(uri.as_str())?;
@@ -157,10 +173,10 @@ impl LanguageServer for Backend {
                     Range::new(start_position, end_position),
                 )))
             })
-        }
-        .await;
+        }();
         Ok(definition)
     }
+
     async fn references(&self, params: ReferenceParams) -> Result<Option<Vec<Location>>> {
         let reference_list = || -> Option<Vec<Location>> {
             let uri = params.text_document_position.text_document.uri;
@@ -168,24 +184,8 @@ impl LanguageServer for Backend {
             let rope = self.document_map.get(uri.as_str())?;
             let position = params.text_document_position.position;
             let offset = position_to_offset(position, &rope)?;
+            let reference_span_list = get_references(&semantic, offset, offset + 1, false)?;
 
-            let interval = semantic.ident_range.find(offset, offset + 1).next()?;
-            let interval_val = interval.val;
-            let reference_span_list = match interval_val {
-                IdentType::Binding(symbol_id) => {
-                    let references = semantic.table.symbol_id_to_references.get(&symbol_id)?;
-                    let reference_span_list: Vec<Span> = references
-                        .iter()
-                        .map(|reference_id| {
-                            semantic.table.reference_id_to_reference[*reference_id]
-                                .span
-                                .clone()
-                        })
-                        .collect();
-                    Some(reference_span_list)
-                }
-                IdentType::Reference(_) => None,
-            }?;
             let ret = reference_span_list
                 .into_iter()
                 .filter_map(|range| {
@@ -211,9 +211,6 @@ impl LanguageServer for Backend {
         let semantic_tokens = || -> Option<Vec<SemanticToken>> {
             let mut im_complete_tokens = self.semantic_token_map.get_mut(&uri)?;
             let rope = self.document_map.get(&uri)?;
-            let ast = self.ast_map.get(&uri)?;
-            let extends_tokens = semantic_token_from_ast(&ast);
-            im_complete_tokens.extend(extends_tokens);
             im_complete_tokens.sort_by(|a, b| a.start.cmp(&b.start));
             let mut pre_line = 0;
             let mut pre_start = 0;
@@ -302,8 +299,8 @@ impl LanguageServer for Backend {
         let uri = &params.text_document.uri;
         let mut hashmap = HashMap::new();
         if let Some(ast) = self.ast_map.get(uri.as_str()) {
-            ast.iter().for_each(|(_, v)| {
-                type_inference(&v.body, &mut hashmap);
+            ast.iter().for_each(|(func, _)| {
+                type_inference(&func.body, &mut hashmap);
             });
         }
 
@@ -408,18 +405,17 @@ impl LanguageServer for Backend {
     async fn rename(&self, params: RenameParams) -> Result<Option<WorkspaceEdit>> {
         let workspace_edit = || -> Option<WorkspaceEdit> {
             let uri = params.text_document_position.text_document.uri;
-            let ast = self.ast_map.get(&uri.to_string())?;
-            let rope = self.document_map.get(&uri.to_string())?;
-
+            let semantic = self.semantic_map.get(uri.as_str())?;
+            let rope = self.document_map.get(uri.as_str())?;
             let position = params.text_document_position.position;
-            let char = rope.try_line_to_char(position.line as usize).ok()?;
-            let offset = char + position.character as usize;
-            let reference_list = get_reference(&ast, offset, true);
+            let offset = position_to_offset(position, &rope)?;
+            let reference_list = get_references(&semantic, offset, offset + 1, true)?;
+
             let new_name = params.new_name;
-            if !reference_list.is_empty() {
+            (!reference_list.is_empty()).then_some(()).map(|_| {
                 let edit_list = reference_list
                     .into_iter()
-                    .filter_map(|(_, range)| {
+                    .filter_map(|range| {
                         let start_position = offset_to_position(range.start, &rope)?;
                         let end_position = offset_to_position(range.end, &rope)?;
                         Some(TextEdit::new(
@@ -430,11 +426,8 @@ impl LanguageServer for Backend {
                     .collect::<Vec<_>>();
                 let mut map = HashMap::new();
                 map.insert(uri, edit_list);
-                let workspace_edit = WorkspaceEdit::new(map);
-                Some(workspace_edit)
-            } else {
-                None
-            }
+                WorkspaceEdit::new(map)
+            })
         }();
         Ok(workspace_edit)
     }
@@ -477,11 +470,12 @@ impl Notification for CustomNotification {
 struct TextDocumentItem<'a> {
     uri: Url,
     text: &'a str,
-    version: i32,
+    version: Option<i32>,
 }
 
 impl Backend {
     async fn on_change<'a>(&self, params: TextDocumentItem<'a>) {
+        dbg!(&params.version);
         let rope = ropey::Rope::from_str(params.text);
         self.document_map
             .insert(params.uri.to_string(), rope.clone());
@@ -490,7 +484,7 @@ impl Backend {
             parse_errors,
             semantic_tokens,
         } = parse(params.text);
-        let diagnostics = parse_errors
+        let mut diagnostics = parse_errors
            .into_iter()
             .filter_map(|item| {
                 let (message, span) = match item.reason() {
@@ -531,13 +525,11 @@ impl Backend {
             })
             .collect::<Vec<_>>();
 
-        self.client
-            .publish_diagnostics(params.uri.clone(), diagnostics, Some(params.version))
-            .await;
-
         if let Some(ast) = ast {
-            let semantic = match analyze_program(&ast) {
-                Ok(semantic) => semantic,
+            match analyze_program(&ast) {
+                Ok(semantic) => {
+                    self.semantic_map.insert(params.uri.to_string(), semantic);
+                }
                 Err(err) => {
                     let span = err.span();
                     let start_position = offset_to_position(span.start, &rope);
@@ -548,20 +540,16 @@ impl Backend {
                             Diagnostic::new_simple(Range::new(start, end), format!("{:?}", err))
                         });
                     if let Some(diag) = diag {
-                        self.client
-                            .publish_diagnostics(
-                                params.uri.clone(),
-                                vec![diag],
-                                Some(params.version),
-                            )
-                            .await;
+                        diagnostics.push(diag);
                     }
-                    return;
                 }
             };
-            self.semantic_map.insert(params.uri.to_string(), semantic);
             self.ast_map.insert(params.uri.to_string(), ast);
         }
+
+        self.client
+            .publish_diagnostics(params.uri.clone(), diagnostics, params.version)
+            .await;
+
         self.semantic_token_map
             .insert(params.uri.to_string(), semantic_tokens);
     }
@@ -583,7 +571,6 @@ async fn main() {
     })
     .finish();
 
-    serde_json::json!({"test": 20});
 
     Server::new(stdin, stdout, socket).serve(service).await;
 }
@@ -599,3 +586,32 @@ fn position_to_offset(position: Position, rope: &Rope) -> Option<usize> {
     let slice = rope.slice(0..line_char_offset + position.character as usize);
     Some(slice.len_bytes())
 }
+
+fn get_references(
+    semantic: &Semantic,
+    start: usize,
+    end: usize,
+    include_definition: bool,
+) -> Option<Vec<Span>> {
+    let interval = semantic.ident_range.find(start, end).next()?;
+    let interval_val = interval.val;
+    match interval_val {
+        IdentType::Binding(symbol_id) => {
+            let references = semantic.table.symbol_id_to_references.get(&symbol_id)?;
+            let mut reference_span_list: Vec<Span> = references
+                .iter()
+                .map(|reference_id| {
+                    semantic.table.reference_id_to_reference[*reference_id]
+                        .span
+                        .clone()
+                })
+                .collect();
+            if include_definition {
+                let symbol_range = semantic.table.symbol_id_to_span.get(symbol_id)?;
+                reference_span_list.push(symbol_range.clone());
+            }
+            Some(reference_span_list)
+        }
+        IdentType::Reference(_) => None,
+    }
+}
diff --git a/src/semantic_analyze.rs b/src/semantic_analyze.rs
index eb29446..0da5cfe 100644
--- a/src/semantic_analyze.rs
+++ b/src/semantic_analyze.rs
@@ -76,6 +76,7 @@ pub struct Function {
     pub params: Vec<String>,
 }
 
+#[derive(Debug)]
 pub struct Ctx {
     env: im_rc::Vector<(String, Span)>,
     table: SymbolTable,
@@ -94,7 +95,7 @@ pub fn analyze_program(ast: &Ast) -> Result<Semantic> {
     let table = SymbolTable::default();
     let env = im_rc::Vector::new();
     let mut ctx = Ctx { env, table };
-    for (_, func) in ast.iter() {
+    for (func, _) in ast.iter() {
         let name = func.name.0.clone();
         ctx.env.push_back((name, func.name.1.clone()));
         ctx.table.add_symbol(func.name.1.clone());
@@ -139,10 +140,11 @@ fn analyze_expr(expr: &Expr, ctx: &mut Ctx) -> Result<()> {
         let span = match ctx.find_symbol(&name.0) {
             Some(ty) => ty,
             None => {
+                dbg!(&ctx);
                 return Err(SemanticError::UndefinedVariable {
                     name: name.0.clone(),
                     span: name.1.clone(),
-                })
+                });
             }
         };
         let symbol_id = *ctx.table.span_to_symbol_id.get(&span).unwrap();