feat: Add more tokens
sbwtw committed Nov 2, 2024
1 parent afb345e commit 315acda
Showing 3 changed files with 66 additions and 33 deletions.
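
The new TokenKind variants cover the ST FOR-loop keywords (FOR, BY, DO, END_FOR, plus CONTINUE and BREAK). As an illustrative sketch — not part of this commit, with the crate path and the standalone main harness assumed — the added variants and helpers could be exercised like this:

    // Illustrative sketch only: the import path below is a guess at the
    // crate layout (the diff touches lib/src/parser/token.rs).
    use stc::parser::token::TokenKind; // hypothetical path

    fn main() {
        // Each newly added loop keyword reports as a keyword...
        for kind in [
            TokenKind::For,
            TokenKind::By,
            TokenKind::Do,
            TokenKind::EndFor,
            TokenKind::Continue,
            TokenKind::Break,
        ] {
            assert!(kind.is_keywords());
            // ...and maps back to its ST spelling via `From<&TokenKind> for String`.
            println!("{}", String::from(&kind));
        }
    }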
6 changes: 6 additions & 0 deletions lib/src/parser/lexer.rs
@@ -179,6 +179,12 @@ impl StLexerBuilder {
TokenKind::Then,
TokenKind::ElseIf,
TokenKind::EndIf,
TokenKind::For,
TokenKind::EndFor,
TokenKind::By,
TokenKind::Do,
TokenKind::Continue,
TokenKind::Break,
TokenKind::Function,
TokenKind::EndFunction,
TokenKind::Program,
42 changes: 42 additions & 0 deletions lib/src/parser/token.rs
@@ -117,6 +117,18 @@ pub enum TokenKind {
EndIf,
/// 'TO'
To,
/// 'FOR'
For,
/// 'BY'
By,
/// 'END_FOR'
EndFor,
/// 'CONTINUE'
Continue,
/// 'BREAK'
Break,
/// 'DO'
Do,
/// 'FUNCTION'
Function,
/// 'END_FUNCTION'
@@ -216,6 +228,30 @@ impl TokenKind {
)
}

#[inline]
pub fn is_keywords(&self) -> bool {
matches!(
*self,
TokenKind::If
| TokenKind::Else
| TokenKind::ElseIf
| TokenKind::EndIf
| TokenKind::For
| TokenKind::EndFor
| TokenKind::By
| TokenKind::Break
| TokenKind::Do
| TokenKind::Continue
| TokenKind::Program
| TokenKind::EndProgram
| TokenKind::Var
| TokenKind::VarGlobal
| TokenKind::Then
| TokenKind::Array
| TokenKind::EndVar
)
}

pub fn kind_match(&self, rhs: &TokenKind) -> bool {
match *self {
TokenKind::AssignRight => matches!(rhs, TokenKind::AssignRight),
@@ -365,6 +401,12 @@ impl From<&TokenKind> for String {
TokenKind::Time => "TIME",
TokenKind::LTime => "LTIME",
TokenKind::String => "STRING",
TokenKind::For => "FOR",
TokenKind::By => "BY",
TokenKind::EndFor => "END_FOR",
TokenKind::Continue => "CONTINUE",
TokenKind::Break => "BREAK",
TokenKind::Do => "DO",
TokenKind::Literal(x) => {
tmp_string = format!("{}", x);
tmp_string.as_str()
51 changes: 18 additions & 33 deletions lsp/src/lsp.rs
@@ -16,34 +16,28 @@ fn semantic_token_type_id(tok: &TokenKind) -> (u32, u32) {
TokenKind::String => (TokenTypes::String as u32, TokenModifiers::None as u32),
// operators
op if op.is_operator() => (TokenTypes::Operator as u32, TokenModifiers::None as u32),
// builtin-types
TokenKind::Int => (TokenTypes::Type as u32, TokenModifiers::None as u32),
// builtin-operators
TokenKind::SizeOf | TokenKind::Adr => (
TokenTypes::BuiltinFunction as u32,
TokenModifiers::None as u32,
),
// builtin-types
_ if tok.is_type() => (TokenTypes::Type as u32, TokenModifiers::None as u32),
// keywords
TokenKind::If
| TokenKind::Then
| TokenKind::EndIf
| TokenKind::Var
| TokenKind::EndVar
| TokenKind::Program
| TokenKind::EndProgram => (TokenTypes::Keyword as u32, TokenModifiers::None as u32),
_ if tok.is_keywords() => (TokenTypes::Keyword as u32, TokenModifiers::None as u32),
_ => (TokenTypes::None as u32, TokenModifiers::None as u32),
}
}

pub struct StcLsp {
_client: Client,
client: Client,
src_mgr: DashMap<Url, Rope>,
}

impl StcLsp {
pub fn new(c: Client) -> Self {
Self {
_client: c,
client: c,
src_mgr: DashMap::new(),
}
}
@@ -89,6 +83,10 @@ impl LanguageServer for StcLsp {
}

async fn shutdown(&self) -> Result<()> {
self.client
.show_message(MessageType::INFO, "shutdown")
.await;

Ok(())
}

@@ -101,7 +99,7 @@ impl LanguageServer for StcLsp {
}

async fn did_change(&self, params: DidChangeTextDocumentParams) {
trace!("{:?}", params);
trace!("did_change: {}", params.text_document.uri);

for change in params.content_changes.into_iter() {
// Only full text support
@@ -113,7 +111,11 @@ }
}

async fn did_save(&self, params: DidSaveTextDocumentParams) {
trace!("{:?}", params);
trace!("did_save: {}", params.text_document.uri);

if let Some(content) = params.text {
self.on_file_change(&params.text_document.uri, content)
}
}

async fn did_close(&self, params: DidCloseTextDocumentParams) {
@@ -125,26 +127,9 @@

async fn document_highlight(
&self,
params: DocumentHighlightParams,
_params: DocumentHighlightParams,
) -> Result<Option<Vec<DocumentHighlight>>> {
trace!("{:?}", params.text_document_position_params);

// let mut highlights = Vec::with_capacity(64);
// highlights.push(DocumentHighlight {
// range: Range {
// start: Position {
// line: 0,
// character: 0,
// },
// end: Position {
// line: 0,
// character: 3,
// },
// },
// kind: None,
// });

// Ok(Some(highlights))
// trace!("{:?}", params.text_document_position_params);

Ok(None)
}
@@ -153,7 +138,7 @@
&self,
params: SemanticTokensParams,
) -> Result<Option<SemanticTokensResult>> {
trace!("{:?}", params);
trace!("tokens_full: {}", params.text_document.uri);

let s = self.src_mgr.get(&params.text_document.uri).unwrap();
let lexer = StLexerBuilder::new().build_iter(s.chars());
