chore: initialize NexaCore compiler workspace with basic frontend and CLI

Add initial project structure for NexaCore programming language compiler:
- Create Cargo workspace with 4 crates (cli, driver, frontend, runtime)
- Add lexer with indentation-based tokenization and keyword support
- Add parser for modules, functions, structs, and basic expressions
- Implement CLI with build command and placeholder subcommands
- Add driver crate to orchestrate compilation pipeline
- Include .gitignore for Rust build artifacts
This commit is contained in:
2026-04-06 16:57:54 +02:00
commit 0da224325a
17 changed files with 1704 additions and 0 deletions

14
crates/nxc-cli/Cargo.toml Normal file
View File

@@ -0,0 +1,14 @@
[package]
name = "nxc-cli"
version.workspace = true
edition.workspace = true
license.workspace = true
authors.workspace = true
[[bin]]
name = "nexacore"
path = "src/main.rs"
[dependencies]
nxc-driver = { path = "../nxc-driver" }

View File

@@ -0,0 +1,65 @@
use std::env;
use std::path::Path;
use std::process::ExitCode;
/// Entry point: delegates to `run` and maps its outcome onto a process exit
/// code, printing any error message to stderr first.
fn main() -> ExitCode {
    if let Err(message) = run() {
        eprintln!("{message}");
        return ExitCode::FAILURE;
    }
    ExitCode::SUCCESS
}
/// Parses the command-line arguments and dispatches to the requested
/// subcommand. Invoked with no arguments, it prints usage and succeeds.
fn run() -> Result<(), String> {
    let mut args = env::args().skip(1);
    let command = match args.next() {
        Some(command) => command,
        None => {
            print_help();
            return Ok(());
        }
    };
    match command.as_str() {
        "build" => {
            let path = args
                .next()
                .ok_or_else(|| "usage: nexacore build <file.nx>".to_string())?;
            // Compile and report a short summary of what was produced.
            let result = nxc_driver::compile_file(Path::new(&path))
                .map_err(format_compile_error)?;
            println!("compiled {path}");
            println!("tokens: {}", result.tokens.len());
            println!("items: {}", result.module.items.len());
            Ok(())
        }
        // Remaining subcommands are reserved but not implemented yet.
        "run" => Err("runtime execution is not implemented yet".to_string()),
        "new" => Err("project scaffolding is not implemented yet".to_string()),
        "test" => Err("test runner is not implemented yet".to_string()),
        "fmt" => Err("formatter is not implemented yet".to_string()),
        "add" => Err("package manager is not implemented yet".to_string()),
        "doc" => Err("docs generator is not implemented yet".to_string()),
        other => Err(format!("unknown command: {other}")),
    }
}
fn format_compile_error(error: nxc_driver::CompileError) -> String {
match error {
nxc_driver::CompileError::Io(io) => format!("io error: {io}"),
nxc_driver::CompileError::Parse(parse) => format!(
"parse error at line {}, column {}: {}",
parse.span.line, parse.span.column, parse.message
),
}
}
/// Prints the top-level usage summary for the CLI.
fn print_help() {
    let usage = [
        " nexacore build <file.nx>",
        " nexacore run <file.nx>",
        " nexacore new <name>",
        " nexacore test",
        " nexacore fmt",
        " nexacore add <package>",
        " nexacore doc",
    ];
    println!("NexaCore CLI");
    println!("usage:");
    for line in usage {
        println!("{line}");
    }
}

View File

@@ -0,0 +1,13 @@
[package]
name = "nxc-driver"
version.workspace = true
edition.workspace = true
license.workspace = true
authors.workspace = true
[lib]
path = "src/lib.rs"
[dependencies]
nxc-frontend = { path = "../nxc-frontend" }

View File

@@ -0,0 +1,41 @@
use std::fs;
use std::path::Path;
use nxc_frontend::{Lexer, Module, ParseError, Parser, Token};
/// Output of a successful compilation: the raw token stream plus the parsed
/// module built from it.
#[derive(Debug)]
pub struct CompileResult {
    /// Every token the lexer produced, including layout tokens and the
    /// trailing `Eof`.
    pub tokens: Vec<Token>,
    /// Parsed top-level module containing the file's items.
    pub module: Module,
}
/// Error raised by the compilation pipeline.
#[derive(Debug)]
pub enum CompileError {
    /// The source file could not be read.
    Io(std::io::Error),
    /// The source was read but failed to parse.
    Parse(ParseError),
}

// Implementing Display + Error lets callers format or box this error
// generically instead of hand-matching the variants.
impl std::fmt::Display for CompileError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Io(io) => write!(f, "io error: {io}"),
            Self::Parse(parse) => write!(
                f,
                "parse error at line {}, column {}: {}",
                parse.span.line, parse.span.column, parse.message
            ),
        }
    }
}

impl std::error::Error for CompileError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match self {
            Self::Io(io) => Some(io),
            // ParseError does not implement std::error::Error, so there is
            // no underlying source to expose for the Parse variant.
            Self::Parse(_) => None,
        }
    }
}

impl From<std::io::Error> for CompileError {
    fn from(value: std::io::Error) -> Self {
        Self::Io(value)
    }
}

impl From<ParseError> for CompileError {
    fn from(value: ParseError) -> Self {
        Self::Parse(value)
    }
}
/// Reads the file at `path` and compiles its contents.
///
/// Returns `CompileError::Io` if the file cannot be read, or
/// `CompileError::Parse` if the source fails to parse.
pub fn compile_file(path: impl AsRef<Path>) -> Result<CompileResult, CompileError> {
    let contents = fs::read_to_string(path)?;
    compile_source(&contents)
}
/// Runs the frontend over `source`: lexes it, parses the token stream, and
/// returns both the tokens and the resulting module.
///
/// The token vector is cloned because `Parser::new` takes ownership while the
/// result also exposes the raw tokens.
pub fn compile_source(source: &str) -> Result<CompileResult, CompileError> {
    let tokens = Lexer::new(source).tokenize();
    let module = Parser::new(tokens.clone()).parse_module()?;
    Ok(CompileResult { tokens, module })
}

View File

@@ -0,0 +1,10 @@
[package]
name = "nxc-frontend"
version.workspace = true
edition.workspace = true
license.workspace = true
authors.workspace = true
[lib]
path = "src/lib.rs"

View File

@@ -0,0 +1,89 @@
use crate::token::Span;
/// A parsed source file: the ordered list of its top-level items.
#[derive(Debug, Clone, Default)]
pub struct Module {
    pub items: Vec<Item>,
}

/// Any declaration that may appear at the top level of a module.
#[derive(Debug, Clone)]
pub enum Item {
    Use(UseDecl),
    Function(FunctionDecl),
    Struct(StructDecl),
}

/// A `use` declaration holding its dot-separated import path.
#[derive(Debug, Clone)]
pub struct UseDecl {
    /// Path segments in source order (the separating dots are not stored).
    pub path: Vec<String>,
    /// Span of the `use` keyword.
    pub span: Span,
}

/// A function declaration: modifiers, signature, and indented body.
#[derive(Debug, Clone)]
pub struct FunctionDecl {
    /// True when declared with `pub`.
    pub is_public: bool,
    /// True when declared with `async`.
    pub is_async: bool,
    pub name: String,
    pub params: Vec<Param>,
    /// Type written after `->`, if any.
    pub return_type: Option<TypeRef>,
    pub body: Block,
    pub span: Span,
}

/// A single `name: Type` function parameter.
#[derive(Debug, Clone)]
pub struct Param {
    pub name: String,
    pub ty: TypeRef,
    pub span: Span,
}

/// A struct declaration with its indented field list.
#[derive(Debug, Clone)]
pub struct StructDecl {
    pub is_public: bool,
    pub name: String,
    pub fields: Vec<FieldDecl>,
    pub span: Span,
}

/// A single `name: Type` struct field.
#[derive(Debug, Clone)]
pub struct FieldDecl {
    pub name: String,
    pub ty: TypeRef,
    pub span: Span,
}

/// An indented sequence of statements.
#[derive(Debug, Clone, Default)]
pub struct Block {
    pub statements: Vec<Stmt>,
}

/// A statement inside a block.
#[derive(Debug, Clone)]
pub enum Stmt {
    /// Variable binding: `let` (mutable = false) or `var` (mutable = true).
    Let {
        mutable: bool,
        name: String,
        /// Optional `: Type` annotation.
        ty: Option<TypeRef>,
        value: Expr,
        span: Span,
    },
    /// A bare expression evaluated for its effects.
    Expr(Expr),
    /// `return`, optionally carrying a value.
    Return(Option<Expr>, Span),
}

/// An expression. Only identifiers, integer/string literals, and calls are
/// representable so far.
#[derive(Debug, Clone)]
pub enum Expr {
    Identifier(String, Span),
    Integer(i64, Span),
    String(String, Span),
    /// A call `callee(args...)`; the callee is itself an expression, so
    /// chained calls nest.
    Call {
        callee: Box<Expr>,
        args: Vec<Expr>,
        span: Span,
    },
}

/// A reference to a type by bare name (no generics or paths yet).
#[derive(Debug, Clone)]
pub struct TypeRef {
    pub name: String,
    pub span: Span,
}

View File

@@ -0,0 +1,360 @@
use crate::token::{Keyword, Span, Token, TokenKind};
/// Hand-written lexer that turns source text into tokens, synthesizing
/// `Newline`, `Indent`, and `Dedent` tokens from the physical layout.
#[derive(Debug)]
pub struct Lexer<'src> {
    /// Source decoded into chars for index-based lookahead.
    chars: Vec<char>,
    /// Index of the next unread char in `chars` (a char offset, not bytes).
    position: usize,
    /// 1-based line of the next unread char.
    line: usize,
    /// 1-based column of the next unread char.
    column: usize,
    /// Stack of currently open indentation widths; starts as `[0]`.
    indent_stack: Vec<usize>,
    /// Dedent tokens still owed after an outdent closed multiple levels.
    pending_dedents: usize,
    /// True when the next token begins a new line, so indentation must be
    /// measured first.
    at_line_start: bool,
    /// Set once `Eof` has been produced; `next_token` then yields `None`.
    finished: bool,
    /// Original source text; currently unused beyond anchoring the 'src
    /// lifetime.
    _source: &'src str,
}
impl<'src> Lexer<'src> {
    /// Creates a lexer positioned at the start of `source` (line 1, column 1)
    /// with a single indentation level of zero on the stack.
    pub fn new(source: &'src str) -> Self {
        Self {
            chars: source.chars().collect(),
            position: 0,
            line: 1,
            column: 1,
            indent_stack: vec![0],
            pending_dedents: 0,
            at_line_start: true,
            finished: false,
            _source: source,
        }
    }

    /// Consumes the lexer and returns the complete token stream, terminated
    /// by exactly one `Eof` token.
    pub fn tokenize(mut self) -> Vec<Token> {
        let mut tokens = Vec::new();
        while let Some(token) = self.next_token() {
            let is_eof = matches!(token.kind, TokenKind::Eof);
            tokens.push(token);
            if is_eof {
                break;
            }
        }
        tokens
    }

    /// Produces the next token, or `None` after `Eof` has been emitted.
    ///
    /// Order of concerns: owed `Dedent`s first, indentation measurement when
    /// a new line begins, then ordinary scanning.
    fn next_token(&mut self) -> Option<Token> {
        if self.finished {
            return None;
        }
        // A single outdent can close several levels; emit the owed Dedents
        // one per call.
        if self.pending_dedents > 0 {
            self.pending_dedents -= 1;
            return Some(self.make_token(TokenKind::Dedent, self.position, self.position));
        }
        if self.at_line_start {
            let indent = self.consume_indentation();
            let current = *self.indent_stack.last().unwrap_or(&0);
            if indent > current {
                self.indent_stack.push(indent);
                self.at_line_start = false;
                return Some(self.make_token(TokenKind::Indent, self.position, self.position));
            }
            if indent < current {
                // Pop every level deeper than the new indent, queueing one
                // Dedent per popped level.
                while let Some(&last) = self.indent_stack.last() {
                    if indent < last {
                        self.indent_stack.pop();
                        self.pending_dedents += 1;
                    } else {
                        break;
                    }
                }
                self.at_line_start = false;
                if self.pending_dedents > 0 {
                    self.pending_dedents -= 1;
                    return Some(self.make_token(TokenKind::Dedent, self.position, self.position));
                }
            }
            self.at_line_start = false;
        }
        self.skip_inline_whitespace();
        let start = self.position;
        let line = self.line;
        let column = self.column;
        let ch = match self.peek() {
            Some(ch) => ch,
            None => {
                // End of input: close any still-open indentation levels with
                // Dedents before emitting the final Eof.
                if self.indent_stack.len() > 1 {
                    self.indent_stack.pop();
                    return Some(Token::new(
                        TokenKind::Dedent,
                        Span::new(start, start, line, column),
                    ));
                }
                self.finished = true;
                return Some(Token::new(
                    TokenKind::Eof,
                    Span::new(start, start, line, column),
                ));
            }
        };
        if ch == '\n' {
            self.bump();
            self.at_line_start = true;
            return Some(Token::new(
                TokenKind::Newline,
                Span::new(start, self.position, line, column),
            ));
        }
        if ch == '"' {
            return Some(self.lex_string());
        }
        if ch.is_ascii_digit() {
            return Some(self.lex_number());
        }
        if is_ident_start(ch) {
            return Some(self.lex_identifier());
        }
        let token = match ch {
            '(' => single(self, TokenKind::LeftParen),
            ')' => single(self, TokenKind::RightParen),
            '{' => single(self, TokenKind::LeftBrace),
            '}' => single(self, TokenKind::RightBrace),
            '[' => single(self, TokenKind::LeftBracket),
            ']' => single(self, TokenKind::RightBracket),
            ',' => single(self, TokenKind::Comma),
            '.' => single(self, TokenKind::Dot),
            ':' => single(self, TokenKind::Colon),
            '+' => single(self, TokenKind::Plus),
            '*' => single(self, TokenKind::Star),
            '/' => single(self, TokenKind::Slash),
            '%' => single(self, TokenKind::Percent),
            '?' => single(self, TokenKind::Question),
            '-' => {
                self.bump();
                if self.peek() == Some('>') {
                    self.bump();
                    Token::new(TokenKind::Arrow, Span::new(start, self.position, line, column))
                } else {
                    Token::new(TokenKind::Minus, Span::new(start, self.position, line, column))
                }
            }
            '=' => {
                self.bump();
                match self.peek() {
                    Some('=') => {
                        self.bump();
                        Token::new(
                            TokenKind::EqualEqual,
                            Span::new(start, self.position, line, column),
                        )
                    }
                    Some('>') => {
                        self.bump();
                        Token::new(
                            TokenKind::FatArrow,
                            Span::new(start, self.position, line, column),
                        )
                    }
                    _ => Token::new(TokenKind::Equal, Span::new(start, self.position, line, column)),
                }
            }
            '!' => {
                self.bump();
                if self.peek() == Some('=') {
                    self.bump();
                    Token::new(
                        TokenKind::BangEqual,
                        Span::new(start, self.position, line, column),
                    )
                } else {
                    Token::new(TokenKind::Bang, Span::new(start, self.position, line, column))
                }
            }
            '<' => {
                self.bump();
                if self.peek() == Some('=') {
                    self.bump();
                    Token::new(
                        TokenKind::LessEqual,
                        Span::new(start, self.position, line, column),
                    )
                } else {
                    Token::new(TokenKind::Less, Span::new(start, self.position, line, column))
                }
            }
            '>' => {
                self.bump();
                if self.peek() == Some('=') {
                    self.bump();
                    Token::new(
                        TokenKind::GreaterEqual,
                        Span::new(start, self.position, line, column),
                    )
                } else {
                    Token::new(TokenKind::Greater, Span::new(start, self.position, line, column))
                }
            }
            _ => {
                // NOTE(review): unknown characters are consumed and emitted
                // as Newline tokens because TokenKind has no error variant;
                // consider adding one so the parser can report them.
                self.bump();
                Token::new(TokenKind::Newline, Span::new(start, self.position, line, column))
            }
        };
        Some(token)
    }

    /// Measures the indentation of the line now starting, in columns (a tab
    /// counts as 4).
    ///
    /// Blank lines and comment-only lines report the *current* indentation
    /// level so they never open or close blocks; the previous version
    /// reported 0 for them, which emitted spurious Dedents inside indented
    /// blocks. The newline is left unconsumed so it is still tokenized as a
    /// normal `Newline`.
    fn consume_indentation(&mut self) -> usize {
        let mut indent = 0;
        loop {
            match self.peek() {
                Some(' ') => {
                    self.bump();
                    indent += 1;
                }
                Some('\t') => {
                    self.bump();
                    indent += 4;
                }
                // Skip carriage returns so CRLF blank lines are also treated
                // as blank rather than as a dedent to column 0.
                Some('\r') => {
                    self.bump();
                }
                Some('\n') => return *self.indent_stack.last().unwrap_or(&0),
                Some('#') => {
                    // Comment-only line: skip to (but not past) the newline
                    // and treat the line like a blank one.
                    while let Some(ch) = self.peek() {
                        if ch == '\n' {
                            break;
                        }
                        self.bump();
                    }
                    return *self.indent_stack.last().unwrap_or(&0);
                }
                _ => break,
            }
        }
        indent
    }

    /// Skips spaces, tabs, carriage returns, and trailing `#` comments within
    /// a line. Stops *at* the newline so it is still emitted as a `Newline`
    /// token and the next line's indentation is measured normally (the
    /// previous version consumed the newline here, which then swallowed the
    /// following line's indentation as inline whitespace).
    fn skip_inline_whitespace(&mut self) {
        while let Some(ch) = self.peek() {
            if ch == ' ' || ch == '\t' || ch == '\r' {
                self.bump();
                continue;
            }
            if ch == '#' {
                while let Some(comment) = self.peek() {
                    if comment == '\n' {
                        break;
                    }
                    self.bump();
                }
                continue;
            }
            break;
        }
    }

    /// Lexes an identifier or keyword starting at the current position.
    fn lex_identifier(&mut self) -> Token {
        let start = self.position;
        let line = self.line;
        let column = self.column;
        let mut value = String::new();
        while let Some(ch) = self.peek() {
            if is_ident_continue(ch) {
                value.push(ch);
                self.bump();
            } else {
                break;
            }
        }
        let kind = match Keyword::from_ident(&value) {
            Some(keyword) => TokenKind::Keyword(keyword),
            None => TokenKind::Identifier(value),
        };
        Token::new(kind, Span::new(start, self.position, line, column))
    }

    /// Lexes a run of ASCII digits as an integer literal; digits are kept as
    /// text and converted later by the parser.
    fn lex_number(&mut self) -> Token {
        let start = self.position;
        let line = self.line;
        let column = self.column;
        let mut value = String::new();
        while let Some(ch) = self.peek() {
            if ch.is_ascii_digit() {
                value.push(ch);
                self.bump();
            } else {
                break;
            }
        }
        Token::new(
            TokenKind::Integer(value),
            Span::new(start, self.position, line, column),
        )
    }

    /// Lexes a double-quoted string literal. Escape sequences are not
    /// supported (a backslash is kept literally, so `\"` still terminates the
    /// string); an unterminated string simply ends at end of input.
    fn lex_string(&mut self) -> Token {
        let start = self.position;
        let line = self.line;
        let column = self.column;
        self.bump();
        let mut value = String::new();
        while let Some(ch) = self.peek() {
            self.bump();
            if ch == '"' {
                break;
            }
            value.push(ch);
        }
        Token::new(
            TokenKind::String(value),
            Span::new(start, self.position, line, column),
        )
    }

    /// Returns the next unread character without consuming it.
    fn peek(&self) -> Option<char> {
        self.chars.get(self.position).copied()
    }

    /// Consumes one character, keeping the line/column counters in sync.
    fn bump(&mut self) {
        if let Some(ch) = self.peek() {
            self.position += 1;
            if ch == '\n' {
                self.line += 1;
                self.column = 1;
            } else {
                self.column += 1;
            }
        }
    }

    /// Builds a token whose span uses the lexer's *current* line/column; used
    /// for the zero-width Indent/Dedent tokens.
    fn make_token(&self, kind: TokenKind, start: usize, end: usize) -> Token {
        Token::new(kind, Span::new(start, end, self.line, self.column))
    }
}
/// Consumes exactly one character from the lexer and wraps it in a
/// single-character token of the given kind.
fn single(lexer: &mut Lexer<'_>, kind: TokenKind) -> Token {
    let (begin, line, column) = (lexer.position, lexer.line, lexer.column);
    lexer.bump();
    Token::new(kind, Span::new(begin, lexer.position, line, column))
}
/// True for characters that may begin an identifier: ASCII letters and '_'.
fn is_ident_start(ch: char) -> bool {
    matches!(ch, 'a'..='z' | 'A'..='Z' | '_')
}
/// True for characters allowed after the first identifier character:
/// identifier starters plus ASCII digits (i.e. ASCII alphanumerics and '_').
fn is_ident_continue(ch: char) -> bool {
    ch.is_ascii_alphanumeric() || ch == '_'
}

View File

@@ -0,0 +1,10 @@
//! Front end of the NexaCore compiler: lexing, parsing, and the AST/token
//! data types.

pub mod ast;
pub mod lexer;
pub mod parser;
pub mod token;

// Convenience re-exports so callers can write e.g. `nxc_frontend::Parser`.
pub use ast::Module;
pub use lexer::Lexer;
pub use parser::{ParseError, Parser};
pub use token::{Keyword, Span, Token, TokenKind};

View File

@@ -0,0 +1,332 @@
use crate::ast::{
Block, Expr, FieldDecl, FunctionDecl, Item, Module, Param, Stmt, StructDecl, TypeRef, UseDecl,
};
use crate::token::{Keyword, Span, Token, TokenKind};
/// A parse failure: a human-readable description plus the span of the token
/// where the parser gave up.
#[derive(Debug, Clone)]
pub struct ParseError {
    /// Description of what the parser expected.
    pub message: String,
    /// Location (offset, line, column) of the offending token.
    pub span: Span,
}
/// Recursive-descent parser over the full token stream produced by the lexer.
pub struct Parser {
    /// Tokens to parse; expected to end with an `Eof` token.
    tokens: Vec<Token>,
    /// Index of the next unconsumed token.
    current: usize,
}
impl Parser {
    /// Wraps a token stream for parsing. The stream is expected to end with
    /// an `Eof` token (as the lexer guarantees); `peek` indexes the vector
    /// directly, so an empty vector would panic on first use.
    pub fn new(tokens: Vec<Token>) -> Self {
        Self { tokens, current: 0 }
    }

    /// Parses the entire stream as a module: top-level items separated by
    /// any number of newlines.
    pub fn parse_module(&mut self) -> Result<Module, ParseError> {
        let mut items = Vec::new();
        while !self.is_at_end() {
            self.skip_newlines();
            if self.is_at_end() {
                break;
            }
            items.push(self.parse_item()?);
            self.skip_newlines();
        }
        Ok(Module { items })
    }

    /// Parses one top-level item: `use`, or a function/struct optionally
    /// preceded by `pub` (and `async` for functions).
    fn parse_item(&mut self) -> Result<Item, ParseError> {
        if self.matches_keyword(Keyword::Use) {
            return self.parse_use().map(Item::Use);
        }
        let is_public = self.matches_keyword(Keyword::Pub);
        let is_async = self.matches_keyword(Keyword::Async);
        if self.matches_keyword(Keyword::Fn) {
            return self.parse_function(is_public, is_async).map(Item::Function);
        }
        if self.matches_keyword(Keyword::Struct) {
            return self.parse_struct(is_public).map(Item::Struct);
        }
        Err(self.error_here("expected module item"))
    }

    /// Parses the dotted path after an already-consumed `use` keyword.
    /// The recorded span covers only the keyword itself.
    fn parse_use(&mut self) -> Result<UseDecl, ParseError> {
        let start = self.previous_span();
        let mut path = Vec::new();
        path.push(self.expect_identifier()?);
        while self.matches(TokenKind::Dot) {
            path.push(self.expect_identifier()?);
        }
        Ok(UseDecl { path, span: start })
    }

    /// Parses a function after its `fn` keyword: name, `(params)`, optional
    /// `-> Type`, then `:` followed by an indented body.
    ///
    /// NOTE(review): each `Param` is given the `fn` keyword's span rather
    /// than its own token's span.
    fn parse_function(
        &mut self,
        is_public: bool,
        is_async: bool,
    ) -> Result<FunctionDecl, ParseError> {
        let start = self.previous_span();
        let name = self.expect_identifier()?;
        self.expect(TokenKind::LeftParen, "expected '(' after function name")?;
        let mut params = Vec::new();
        if !self.check(&TokenKind::RightParen) {
            loop {
                let param_name = self.expect_identifier()?;
                self.expect(TokenKind::Colon, "expected ':' after parameter name")?;
                let ty = self.parse_type()?;
                params.push(Param {
                    name: param_name,
                    ty,
                    span: start,
                });
                if !self.matches(TokenKind::Comma) {
                    break;
                }
            }
        }
        self.expect(TokenKind::RightParen, "expected ')' after parameters")?;
        let return_type = if self.matches(TokenKind::Arrow) {
            Some(self.parse_type()?)
        } else {
            None
        };
        self.expect(TokenKind::Colon, "expected ':' before function body")?;
        self.skip_newlines();
        let body = self.parse_block()?;
        Ok(FunctionDecl {
            is_public,
            is_async,
            name,
            params,
            return_type,
            body,
            span: start,
        })
    }

    /// Parses a struct after its `struct` keyword: name, `:`, then an
    /// indented list of `field: Type` lines.
    fn parse_struct(&mut self, is_public: bool) -> Result<StructDecl, ParseError> {
        let start = self.previous_span();
        let name = self.expect_identifier()?;
        self.expect(TokenKind::Colon, "expected ':' after struct name")?;
        self.skip_newlines();
        self.expect(TokenKind::Indent, "expected indented struct body")?;
        let mut fields = Vec::new();
        while !self.check(&TokenKind::Dedent) && !self.is_at_end() {
            self.skip_newlines();
            if self.check(&TokenKind::Dedent) {
                break;
            }
            let field_name = self.expect_identifier()?;
            self.expect(TokenKind::Colon, "expected ':' after field name")?;
            let ty = self.parse_type()?;
            fields.push(FieldDecl {
                name: field_name,
                ty,
                // NOTE(review): fields reuse the `struct` keyword's span.
                span: start,
            });
            self.skip_newlines();
        }
        self.expect(TokenKind::Dedent, "expected end of struct body")?;
        Ok(StructDecl {
            is_public,
            name,
            fields,
            span: start,
        })
    }

    /// Parses an Indent-delimited sequence of statements up to the matching
    /// Dedent.
    fn parse_block(&mut self) -> Result<Block, ParseError> {
        self.expect(TokenKind::Indent, "expected indented block")?;
        let mut statements = Vec::new();
        while !self.check(&TokenKind::Dedent) && !self.is_at_end() {
            self.skip_newlines();
            if self.check(&TokenKind::Dedent) {
                break;
            }
            statements.push(self.parse_statement()?);
            self.skip_newlines();
        }
        self.expect(TokenKind::Dedent, "expected end of block")?;
        Ok(Block { statements })
    }

    /// Parses one statement: a `let`/`var` binding, a `return`, or a bare
    /// expression.
    fn parse_statement(&mut self) -> Result<Stmt, ParseError> {
        if self.matches_keyword(Keyword::Let) {
            return self.parse_let(false);
        }
        if self.matches_keyword(Keyword::Var) {
            return self.parse_let(true);
        }
        if self.matches_keyword(Keyword::Return) {
            let span = self.previous_span();
            // A return immediately followed by end of line/block has no value.
            if self.check(&TokenKind::Newline) || self.check(&TokenKind::Dedent) {
                return Ok(Stmt::Return(None, span));
            }
            let expr = self.parse_expression()?;
            return Ok(Stmt::Return(Some(expr), span));
        }
        Ok(Stmt::Expr(self.parse_expression()?))
    }

    /// Parses the remainder of a `let`/`var` statement after its keyword:
    /// `name [: Type] = expr`.
    fn parse_let(&mut self, mutable: bool) -> Result<Stmt, ParseError> {
        let span = self.previous_span();
        let name = self.expect_identifier()?;
        let ty = if self.matches(TokenKind::Colon) {
            Some(self.parse_type()?)
        } else {
            None
        };
        self.expect(TokenKind::Equal, "expected '=' in variable declaration")?;
        let value = self.parse_expression()?;
        Ok(Stmt::Let {
            mutable,
            name,
            ty,
            value,
            span,
        })
    }

    /// Parses an expression: a primary followed by any number of call
    /// argument lists, so `f(a)(b)` nests calls left to right.
    fn parse_expression(&mut self) -> Result<Expr, ParseError> {
        let mut expr = self.parse_primary()?;
        while self.matches(TokenKind::LeftParen) {
            let mut args = Vec::new();
            if !self.check(&TokenKind::RightParen) {
                loop {
                    args.push(self.parse_expression()?);
                    if !self.matches(TokenKind::Comma) {
                        break;
                    }
                }
            }
            // NOTE(review): the call's span is the span of the token just
            // before the ')', not the span of the whole call expression.
            let span = self.previous_span();
            self.expect(TokenKind::RightParen, "expected ')' after arguments")?;
            expr = Expr::Call {
                callee: Box::new(expr),
                args,
                span,
            };
        }
        Ok(expr)
    }

    /// Parses a primary expression: identifier, integer literal, or string
    /// literal.
    fn parse_primary(&mut self) -> Result<Expr, ParseError> {
        let token = self.advance().clone();
        match token.kind {
            TokenKind::Identifier(value) => Ok(Expr::Identifier(value, token.span)),
            TokenKind::Integer(value) => {
                // NOTE(review): literals that do not fit in i64 silently
                // become 0 via unwrap_or_default.
                let parsed = value.parse::<i64>().unwrap_or_default();
                Ok(Expr::Integer(parsed, token.span))
            }
            TokenKind::String(value) => Ok(Expr::String(value, token.span)),
            _ => Err(ParseError {
                message: "expected expression".to_string(),
                span: token.span,
            }),
        }
    }

    /// Parses a type reference — currently just a bare identifier.
    fn parse_type(&mut self) -> Result<TypeRef, ParseError> {
        let token = self.advance().clone();
        match token.kind {
            TokenKind::Identifier(name) => Ok(TypeRef {
                name,
                span: token.span,
            }),
            _ => Err(ParseError {
                message: "expected type name".to_string(),
                span: token.span,
            }),
        }
    }

    /// Consumes the next token and returns its identifier text, or errors.
    fn expect_identifier(&mut self) -> Result<String, ParseError> {
        let token = self.advance().clone();
        match token.kind {
            TokenKind::Identifier(name) => Ok(name),
            _ => Err(ParseError {
                message: "expected identifier".to_string(),
                span: token.span,
            }),
        }
    }

    /// Consumes the next token if it matches `kind` (by variant only — any
    /// payload in `kind` is ignored); otherwise errors with `message`.
    fn expect(&mut self, kind: TokenKind, message: &str) -> Result<(), ParseError> {
        if self.matches(kind) {
            Ok(())
        } else {
            Err(self.error_here(message))
        }
    }

    /// Consumes the next token if it is exactly the given keyword.
    /// (`Keyword` is `Copy`, so the guard binding copies rather than moves.)
    fn matches_keyword(&mut self, keyword: Keyword) -> bool {
        if matches!(self.peek().kind, TokenKind::Keyword(found) if found == keyword) {
            self.advance();
            return true;
        }
        false
    }

    /// Consumes the next token if it matches `kind` by variant.
    fn matches(&mut self, kind: TokenKind) -> bool {
        if self.check(&kind) {
            self.advance();
            return true;
        }
        false
    }

    /// Tests (without consuming) whether the next token matches `kind` by
    /// variant; at end of stream only `Eof` matches.
    fn check(&self, kind: &TokenKind) -> bool {
        if self.is_at_end() {
            return matches!(kind, TokenKind::Eof);
        }
        same_variant(&self.peek().kind, kind)
    }

    /// Consumes any run of Newline tokens.
    fn skip_newlines(&mut self) {
        while self.matches(TokenKind::Newline) {}
    }

    /// True when the next token is `Eof`.
    fn is_at_end(&self) -> bool {
        matches!(self.peek().kind, TokenKind::Eof)
    }

    /// The next unconsumed token. Indexes directly, so it relies on the
    /// stream ending with `Eof` (see `advance`, which never moves past it).
    fn peek(&self) -> &Token {
        &self.tokens[self.current]
    }

    /// Consumes and returns the next token; stays parked on the final `Eof`
    /// rather than moving past the end of the vector.
    fn advance(&mut self) -> &Token {
        if !self.is_at_end() {
            self.current += 1;
        }
        &self.tokens[self.current.saturating_sub(1)]
    }

    /// Span of the most recently consumed token, or a default span when
    /// nothing has been consumed yet.
    fn previous_span(&self) -> Span {
        if self.current == 0 {
            Span::default()
        } else {
            self.tokens[self.current - 1].span
        }
    }

    /// Builds a `ParseError` pointing at the current token.
    fn error_here(&self, message: &str) -> ParseError {
        ParseError {
            message: message.to_string(),
            span: self.peek().span,
        }
    }
}
/// Compares two token kinds by enum variant only, ignoring any payload
/// (e.g. any two `Identifier` tokens count as the same kind).
fn same_variant(left: &TokenKind, right: &TokenKind) -> bool {
    let lhs = std::mem::discriminant(left);
    let rhs = std::mem::discriminant(right);
    lhs == rhs
}

View File

@@ -0,0 +1,126 @@
use std::fmt;
/// Source location of a token: a half-open character-offset range plus the
/// 1-based line and column where the token starts.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub struct Span {
    /// Offset of the first character (an index into the lexer's char vector,
    /// not a byte offset).
    pub start: usize,
    /// Offset one past the last character.
    pub end: usize,
    /// 1-based line of the first character.
    pub line: usize,
    /// 1-based column of the first character.
    pub column: usize,
}

impl Span {
    /// Builds a span from its raw coordinates.
    pub fn new(start: usize, end: usize, line: usize, column: usize) -> Self {
        Self {
            start,
            end,
            line,
            column,
        }
    }
}
/// A single lexed token: its kind paired with its source location.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Token {
    pub kind: TokenKind,
    pub span: Span,
}

impl Token {
    /// Pairs a kind with its span.
    pub fn new(kind: TokenKind, span: Span) -> Self {
        Self { kind, span }
    }
}
/// Every lexical token kind produced by the lexer.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum TokenKind {
    // Tokens carrying source text.
    Identifier(String),
    /// Integer literal, stored as its source digits; conversion happens in
    /// the parser.
    Integer(String),
    String(String),
    Keyword(Keyword),
    // Delimiters.
    LeftParen,
    RightParen,
    LeftBrace,
    RightBrace,
    LeftBracket,
    RightBracket,
    // Punctuation and operators.
    Comma,
    Dot,
    Colon,
    Arrow,
    FatArrow,
    Plus,
    Minus,
    Star,
    Slash,
    Percent,
    Equal,
    EqualEqual,
    Bang,
    BangEqual,
    Less,
    LessEqual,
    Greater,
    GreaterEqual,
    Question,
    // Layout tokens synthesized from newlines and indentation changes.
    Newline,
    Indent,
    Dedent,
    Eof,
}
/// Reserved words of the NexaCore language.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Keyword {
    Async,
    Else,
    Fn,
    For,
    If,
    Impl,
    Import,
    In,
    Let,
    Match,
    Pub,
    Return,
    Struct,
    Use,
    Var,
    While,
}

impl Keyword {
    /// Maps raw identifier text to its keyword, or `None` for ordinary
    /// identifiers. Matching is exact and case-sensitive.
    pub fn from_ident(value: &str) -> Option<Self> {
        const TABLE: &[(&str, Keyword)] = &[
            ("async", Keyword::Async),
            ("else", Keyword::Else),
            ("fn", Keyword::Fn),
            ("for", Keyword::For),
            ("if", Keyword::If),
            ("impl", Keyword::Impl),
            ("import", Keyword::Import),
            ("in", Keyword::In),
            ("let", Keyword::Let),
            ("match", Keyword::Match),
            ("pub", Keyword::Pub),
            ("return", Keyword::Return),
            ("struct", Keyword::Struct),
            ("use", Keyword::Use),
            ("var", Keyword::Var),
            ("while", Keyword::While),
        ];
        TABLE
            .iter()
            .find(|(text, _)| *text == value)
            .map(|&(_, keyword)| keyword)
    }
}
impl fmt::Display for TokenKind {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
TokenKind::Identifier(name) => write!(f, "identifier({name})"),
TokenKind::Integer(value) => write!(f, "integer({value})"),
TokenKind::String(value) => write!(f, "string({value})"),
TokenKind::Keyword(keyword) => write!(f, "keyword({keyword:?})"),
other => write!(f, "{other:?}"),
}
}
}

View File

@@ -0,0 +1,10 @@
[package]
name = "nxc-runtime"
version.workspace = true
edition.workspace = true
license.workspace = true
authors.workspace = true
[lib]
path = "src/lib.rs"

View File

@@ -0,0 +1,10 @@
//! NexaCore runtime scaffolding.
//!
//! Nothing is implemented here yet; the crate currently only reserves the
//! namespace and exposes its version string.
//!
//! TODO:
//! - async scheduler
//! - string and collection runtime
//! - HTTP primitives
//! - PostgreSQL client ABI

/// Version string reported by the runtime crate.
pub const RUNTIME_VERSION: &str = "0.1.0";