use std::collections::BTreeMap;
use tower_lsp::lsp_types::SemanticToken;
use crate::ast::File;
use crate::syntax::lexeme::Lexeme;
use crate::syntax::lexer::{Comment, Lexer};
use crate::syntax::span::Spanned;
/// Category assigned to a named entity found in a document.
///
/// Stored (keyed by the entity's name) in [`DocumentData::name_kinds`];
/// NOTE(review): given the `SemanticToken` import, these presumably map to
/// LSP semantic-token types — confirm at the call sites that read the map.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(crate) enum NameKind {
    Function,
    Type,
    Parameter,
    Variable,
    Constant,
    EventName,
    Property,
}
/// Cached per-document state for one open file.
pub(crate) struct DocumentData {
    /// Full text of the document.
    pub source: String,
    /// Lexemes produced by tokenizing `source` (lexer diagnostics are
    /// discarded at construction time).
    pub tokens: Vec<Spanned<Lexeme>>,
    /// Comments collected by the lexer, kept separately from `tokens`.
    pub comments: Vec<Comment>,
    /// Byte offset of the start of each line in `source`; index 0 is
    /// always offset 0 (see `compute_line_starts`).
    pub line_starts: Vec<usize>,
    /// Kind recorded for each known name, plus a `u32` payload —
    /// NOTE(review): the meaning of the number (position? arity? index)
    /// is not visible here; confirm where entries are inserted.
    pub name_kinds: BTreeMap<String, (NameKind, u32)>,
    /// Most recent parsed AST, if one has been produced; `None` after
    /// construction until a parse populates it.
    pub cached_ast: Option<File>,
    /// Semantic tokens from the last computation — presumably retained
    /// for LSP delta responses; TODO confirm against the request handler.
    pub last_semantic_tokens: Vec<SemanticToken>,
    /// Counter starting at 0, exposed as a string via `result_id()`.
    pub result_version: u64,
}
impl DocumentData {
    /// Builds the cached state for `source`: runs the lexer over the text
    /// and records where each line begins. Lexer diagnostics are dropped
    /// here; everything derived later (AST, semantic tokens) starts empty.
    pub fn new(source: String) -> Self {
        // Both steps only borrow `source`; it is moved into the struct after.
        let (tokens, comments, _diagnostics) = Lexer::new(&source, 0).tokenize();
        let line_starts = compute_line_starts(&source);

        Self {
            source,
            tokens,
            comments,
            line_starts,
            name_kinds: BTreeMap::default(),
            cached_ast: None,
            last_semantic_tokens: Vec::default(),
            result_version: 0,
        }
    }

    /// Renders the current result version as a string (e.g. for use as an
    /// LSP `resultId`).
    pub fn result_id(&self) -> String {
        let version = self.result_version;
        version.to_string()
    }
}
/// Returns the byte offset at which each line of `source` begins.
///
/// The result always holds at least one entry (offset 0 for the first
/// line); every `'\n'` contributes the offset of the byte that follows
/// it, so a trailing newline produces a final, empty line.
pub(super) fn compute_line_starts(source: &str) -> Vec<usize> {
    // `match_indices` yields byte offsets, so positions stay correct even
    // after multi-byte UTF-8 characters.
    std::iter::once(0)
        .chain(source.match_indices('\n').map(|(nl, _)| nl + 1))
        .collect()
}