//! Lexer integration tests for `nxc_frontend`: token-kind coverage,
//! indentation (Indent/Dedent) tracking, and diagnostic reporting.
use nxc_frontend::{Keyword, Lexer, TokenKind};
#[test]
fn lexes_literals_and_operators() {
    // A line mixing a `let` declaration, numeric literals, arithmetic
    // operators, logical-and, and a boolean must lex without diagnostics.
    let lex_result = Lexer::new("let value = 12.5 + 3 * -2 && true\n").lex();
    assert!(lex_result.diagnostics.is_empty());

    // Expected token stream, including the trailing Newline and Eof markers.
    let expected = vec![
        TokenKind::Keyword(Keyword::Let),
        TokenKind::Identifier("value".into()),
        TokenKind::Equal,
        TokenKind::Float("12.5".into()),
        TokenKind::Plus,
        TokenKind::Integer("3".into()),
        TokenKind::Star,
        TokenKind::Minus,
        TokenKind::Integer("2".into()),
        TokenKind::AndAnd,
        TokenKind::Bool(true),
        TokenKind::Newline,
        TokenKind::Eof,
    ];

    let actual: Vec<TokenKind> = lex_result
        .tokens
        .into_iter()
        .map(|token| token.kind)
        .collect();
    assert_eq!(actual, expected);
}
#[test]
fn lexes_indent_and_dedent_tokens() {
    // An indented function body must open with an Indent token and be
    // closed by a matching Dedent.
    let source = "fn main() -> Int:\n let value = 1\n return value\n";
    let lex_result = Lexer::new(source).lex();
    assert!(lex_result.diagnostics.is_empty());

    let token_kinds: Vec<TokenKind> = lex_result
        .tokens
        .into_iter()
        .map(|token| token.kind)
        .collect();
    assert!(token_kinds.contains(&TokenKind::Indent));
    assert!(token_kinds.contains(&TokenKind::Dedent));
}
#[test]
fn reports_unterminated_string() {
    // A string literal cut off by a newline must produce exactly one
    // diagnostic whose message names the problem.
    let lex_result = Lexer::new("let value = \"oops\n").lex();
    assert_eq!(lex_result.diagnostics.len(), 1);

    let message = &lex_result.diagnostics[0].message;
    assert!(message.contains("unterminated string literal"));
}