chore: initialize NexaCore compiler workspace with basic frontend and CLI
Add initial project structure for NexaCore programming language compiler: - Create Cargo workspace with 4 crates (cli, driver, frontend, runtime) - Add lexer with indentation-based tokenization and keyword support - Add parser for modules, functions, structs, and basic expressions - Implement CLI with build command and placeholder subcommands - Add driver crate to orchestrate compilation pipeline - Include .gitignore for Rust build artifacts
This commit is contained in:
7
.gitignore
vendored
Normal file
7
.gitignore
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
target/
|
||||
dist/
|
||||
build/
|
||||
.DS_Store
|
||||
*.tmp
|
||||
*.log
|
||||
|
||||
15
Cargo.toml
Normal file
15
Cargo.toml
Normal file
@@ -0,0 +1,15 @@
|
||||
[workspace]
|
||||
members = [
|
||||
"crates/nxc-cli",
|
||||
"crates/nxc-driver",
|
||||
"crates/nxc-frontend",
|
||||
"crates/nxc-runtime",
|
||||
]
|
||||
resolver = "2"
|
||||
|
||||
[workspace.package]
|
||||
edition = "2021"
|
||||
license = "Apache-2.0"
|
||||
version = "0.1.0"
|
||||
authors = ["NexaCore Contributors"]
|
||||
|
||||
15
README.md
Normal file
15
README.md
Normal file
@@ -0,0 +1,15 @@
|
||||
# NexaCore
|
||||
|
||||
NexaCore is a new compiled programming language for backend systems, APIs, databases, and long-running services.
|
||||
|
||||
This repository contains:
|
||||
|
||||
- the language design foundation
|
||||
- the compiler workspace
|
||||
- the runtime and standard library layout
|
||||
- starter examples for backend-oriented development
|
||||
|
||||
The first MVP compiler path compiles NexaCore source into a typed intermediate representation and then emits C as the initial backend target. That keeps the compiler realistic to build now while preserving a clean path toward a later native backend.
|
||||
|
||||
See [docs/nexacore-foundation.md](docs/nexacore-foundation.md) for the architectural design and MVP roadmap.
|
||||
|
||||
14
crates/nxc-cli/Cargo.toml
Normal file
14
crates/nxc-cli/Cargo.toml
Normal file
@@ -0,0 +1,14 @@
|
||||
[package]
|
||||
name = "nxc-cli"
|
||||
version.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
authors.workspace = true
|
||||
|
||||
[[bin]]
|
||||
name = "nexacore"
|
||||
path = "src/main.rs"
|
||||
|
||||
[dependencies]
|
||||
nxc-driver = { path = "../nxc-driver" }
|
||||
|
||||
65
crates/nxc-cli/src/main.rs
Normal file
65
crates/nxc-cli/src/main.rs
Normal file
@@ -0,0 +1,65 @@
|
||||
use std::env;
|
||||
use std::path::Path;
|
||||
use std::process::ExitCode;
|
||||
|
||||
fn main() -> ExitCode {
|
||||
match run() {
|
||||
Ok(()) => ExitCode::SUCCESS,
|
||||
Err(message) => {
|
||||
eprintln!("{message}");
|
||||
ExitCode::FAILURE
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn run() -> Result<(), String> {
|
||||
let mut args = env::args().skip(1);
|
||||
let Some(command) = args.next() else {
|
||||
print_help();
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
match command.as_str() {
|
||||
"build" => {
|
||||
let Some(path) = args.next() else {
|
||||
return Err("usage: nexacore build <file.nx>".to_string());
|
||||
};
|
||||
let result = nxc_driver::compile_file(Path::new(&path))
|
||||
.map_err(format_compile_error)?;
|
||||
println!("compiled {path}");
|
||||
println!("tokens: {}", result.tokens.len());
|
||||
println!("items: {}", result.module.items.len());
|
||||
Ok(())
|
||||
}
|
||||
"run" => Err("runtime execution is not implemented yet".to_string()),
|
||||
"new" => Err("project scaffolding is not implemented yet".to_string()),
|
||||
"test" => Err("test runner is not implemented yet".to_string()),
|
||||
"fmt" => Err("formatter is not implemented yet".to_string()),
|
||||
"add" => Err("package manager is not implemented yet".to_string()),
|
||||
"doc" => Err("docs generator is not implemented yet".to_string()),
|
||||
_ => Err(format!("unknown command: {command}")),
|
||||
}
|
||||
}
|
||||
|
||||
fn format_compile_error(error: nxc_driver::CompileError) -> String {
|
||||
match error {
|
||||
nxc_driver::CompileError::Io(io) => format!("io error: {io}"),
|
||||
nxc_driver::CompileError::Parse(parse) => format!(
|
||||
"parse error at line {}, column {}: {}",
|
||||
parse.span.line, parse.span.column, parse.message
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
/// Prints the CLI banner and one usage line per subcommand to stdout.
fn print_help() {
    println!("NexaCore CLI");
    println!("usage:");
    // One entry per subcommand accepted by `run`; keep the two lists in sync.
    let usages = [
        "nexacore build <file.nx>",
        "nexacore run <file.nx>",
        "nexacore new <name>",
        "nexacore test",
        "nexacore fmt",
        "nexacore add <package>",
        "nexacore doc",
    ];
    for usage in usages {
        println!("  {usage}");
    }
}
|
||||
|
||||
13
crates/nxc-driver/Cargo.toml
Normal file
13
crates/nxc-driver/Cargo.toml
Normal file
@@ -0,0 +1,13 @@
|
||||
[package]
|
||||
name = "nxc-driver"
|
||||
version.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
authors.workspace = true
|
||||
|
||||
[lib]
|
||||
path = "src/lib.rs"
|
||||
|
||||
[dependencies]
|
||||
nxc-frontend = { path = "../nxc-frontend" }
|
||||
|
||||
41
crates/nxc-driver/src/lib.rs
Normal file
41
crates/nxc-driver/src/lib.rs
Normal file
@@ -0,0 +1,41 @@
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
|
||||
use nxc_frontend::{Lexer, Module, ParseError, Parser, Token};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct CompileResult {
|
||||
pub tokens: Vec<Token>,
|
||||
pub module: Module,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum CompileError {
|
||||
Io(std::io::Error),
|
||||
Parse(ParseError),
|
||||
}
|
||||
|
||||
impl From<std::io::Error> for CompileError {
|
||||
fn from(value: std::io::Error) -> Self {
|
||||
Self::Io(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ParseError> for CompileError {
|
||||
fn from(value: ParseError) -> Self {
|
||||
Self::Parse(value)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn compile_file(path: impl AsRef<Path>) -> Result<CompileResult, CompileError> {
|
||||
let source = fs::read_to_string(path)?;
|
||||
compile_source(&source)
|
||||
}
|
||||
|
||||
pub fn compile_source(source: &str) -> Result<CompileResult, CompileError> {
|
||||
let tokens = Lexer::new(source).tokenize();
|
||||
let mut parser = Parser::new(tokens.clone());
|
||||
let module = parser.parse_module()?;
|
||||
Ok(CompileResult { tokens, module })
|
||||
}
|
||||
|
||||
10
crates/nxc-frontend/Cargo.toml
Normal file
10
crates/nxc-frontend/Cargo.toml
Normal file
@@ -0,0 +1,10 @@
|
||||
[package]
|
||||
name = "nxc-frontend"
|
||||
version.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
authors.workspace = true
|
||||
|
||||
[lib]
|
||||
path = "src/lib.rs"
|
||||
|
||||
89
crates/nxc-frontend/src/ast.rs
Normal file
89
crates/nxc-frontend/src/ast.rs
Normal file
@@ -0,0 +1,89 @@
|
||||
use crate::token::Span;
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct Module {
|
||||
pub items: Vec<Item>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum Item {
|
||||
Use(UseDecl),
|
||||
Function(FunctionDecl),
|
||||
Struct(StructDecl),
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct UseDecl {
|
||||
pub path: Vec<String>,
|
||||
pub span: Span,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct FunctionDecl {
|
||||
pub is_public: bool,
|
||||
pub is_async: bool,
|
||||
pub name: String,
|
||||
pub params: Vec<Param>,
|
||||
pub return_type: Option<TypeRef>,
|
||||
pub body: Block,
|
||||
pub span: Span,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Param {
|
||||
pub name: String,
|
||||
pub ty: TypeRef,
|
||||
pub span: Span,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct StructDecl {
|
||||
pub is_public: bool,
|
||||
pub name: String,
|
||||
pub fields: Vec<FieldDecl>,
|
||||
pub span: Span,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct FieldDecl {
|
||||
pub name: String,
|
||||
pub ty: TypeRef,
|
||||
pub span: Span,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct Block {
|
||||
pub statements: Vec<Stmt>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum Stmt {
|
||||
Let {
|
||||
mutable: bool,
|
||||
name: String,
|
||||
ty: Option<TypeRef>,
|
||||
value: Expr,
|
||||
span: Span,
|
||||
},
|
||||
Expr(Expr),
|
||||
Return(Option<Expr>, Span),
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum Expr {
|
||||
Identifier(String, Span),
|
||||
Integer(i64, Span),
|
||||
String(String, Span),
|
||||
Call {
|
||||
callee: Box<Expr>,
|
||||
args: Vec<Expr>,
|
||||
span: Span,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct TypeRef {
|
||||
pub name: String,
|
||||
pub span: Span,
|
||||
}
|
||||
|
||||
360
crates/nxc-frontend/src/lexer.rs
Normal file
360
crates/nxc-frontend/src/lexer.rs
Normal file
@@ -0,0 +1,360 @@
|
||||
use crate::token::{Keyword, Span, Token, TokenKind};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Lexer<'src> {
|
||||
chars: Vec<char>,
|
||||
position: usize,
|
||||
line: usize,
|
||||
column: usize,
|
||||
indent_stack: Vec<usize>,
|
||||
pending_dedents: usize,
|
||||
at_line_start: bool,
|
||||
finished: bool,
|
||||
_source: &'src str,
|
||||
}
|
||||
|
||||
impl<'src> Lexer<'src> {
|
||||
pub fn new(source: &'src str) -> Self {
|
||||
Self {
|
||||
chars: source.chars().collect(),
|
||||
position: 0,
|
||||
line: 1,
|
||||
column: 1,
|
||||
indent_stack: vec![0],
|
||||
pending_dedents: 0,
|
||||
at_line_start: true,
|
||||
finished: false,
|
||||
_source: source,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn tokenize(mut self) -> Vec<Token> {
|
||||
let mut tokens = Vec::new();
|
||||
while let Some(token) = self.next_token() {
|
||||
let is_eof = matches!(token.kind, TokenKind::Eof);
|
||||
tokens.push(token);
|
||||
if is_eof {
|
||||
break;
|
||||
}
|
||||
}
|
||||
tokens
|
||||
}
|
||||
|
||||
fn next_token(&mut self) -> Option<Token> {
|
||||
if self.finished {
|
||||
return None;
|
||||
}
|
||||
|
||||
if self.pending_dedents > 0 {
|
||||
self.pending_dedents -= 1;
|
||||
return Some(self.make_token(TokenKind::Dedent, self.position, self.position));
|
||||
}
|
||||
|
||||
if self.at_line_start {
|
||||
let indent = self.consume_indentation();
|
||||
let current = *self.indent_stack.last().unwrap_or(&0);
|
||||
if indent > current {
|
||||
self.indent_stack.push(indent);
|
||||
self.at_line_start = false;
|
||||
return Some(self.make_token(TokenKind::Indent, self.position, self.position));
|
||||
}
|
||||
|
||||
if indent < current {
|
||||
while let Some(&last) = self.indent_stack.last() {
|
||||
if indent < last {
|
||||
self.indent_stack.pop();
|
||||
self.pending_dedents += 1;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
self.at_line_start = false;
|
||||
if self.pending_dedents > 0 {
|
||||
self.pending_dedents -= 1;
|
||||
return Some(self.make_token(TokenKind::Dedent, self.position, self.position));
|
||||
}
|
||||
}
|
||||
self.at_line_start = false;
|
||||
}
|
||||
|
||||
self.skip_inline_whitespace();
|
||||
|
||||
let start = self.position;
|
||||
let line = self.line;
|
||||
let column = self.column;
|
||||
|
||||
let ch = match self.peek() {
|
||||
Some(ch) => ch,
|
||||
None => {
|
||||
if self.indent_stack.len() > 1 {
|
||||
self.indent_stack.pop();
|
||||
return Some(Token::new(
|
||||
TokenKind::Dedent,
|
||||
Span::new(start, start, line, column),
|
||||
));
|
||||
}
|
||||
self.finished = true;
|
||||
return Some(Token::new(
|
||||
TokenKind::Eof,
|
||||
Span::new(start, start, line, column),
|
||||
));
|
||||
}
|
||||
};
|
||||
|
||||
if ch == '\n' {
|
||||
self.bump();
|
||||
self.at_line_start = true;
|
||||
return Some(Token::new(
|
||||
TokenKind::Newline,
|
||||
Span::new(start, self.position, line, column),
|
||||
));
|
||||
}
|
||||
|
||||
if ch == '"' {
|
||||
return Some(self.lex_string());
|
||||
}
|
||||
|
||||
if ch.is_ascii_digit() {
|
||||
return Some(self.lex_number());
|
||||
}
|
||||
|
||||
if is_ident_start(ch) {
|
||||
return Some(self.lex_identifier());
|
||||
}
|
||||
|
||||
let token = match ch {
|
||||
'(' => single(self, TokenKind::LeftParen),
|
||||
')' => single(self, TokenKind::RightParen),
|
||||
'{' => single(self, TokenKind::LeftBrace),
|
||||
'}' => single(self, TokenKind::RightBrace),
|
||||
'[' => single(self, TokenKind::LeftBracket),
|
||||
']' => single(self, TokenKind::RightBracket),
|
||||
',' => single(self, TokenKind::Comma),
|
||||
'.' => single(self, TokenKind::Dot),
|
||||
':' => single(self, TokenKind::Colon),
|
||||
'+' => single(self, TokenKind::Plus),
|
||||
'*' => single(self, TokenKind::Star),
|
||||
'/' => single(self, TokenKind::Slash),
|
||||
'%' => single(self, TokenKind::Percent),
|
||||
'?' => single(self, TokenKind::Question),
|
||||
'-' => {
|
||||
self.bump();
|
||||
if self.peek() == Some('>') {
|
||||
self.bump();
|
||||
Token::new(TokenKind::Arrow, Span::new(start, self.position, line, column))
|
||||
} else {
|
||||
Token::new(TokenKind::Minus, Span::new(start, self.position, line, column))
|
||||
}
|
||||
}
|
||||
'=' => {
|
||||
self.bump();
|
||||
match self.peek() {
|
||||
Some('=') => {
|
||||
self.bump();
|
||||
Token::new(
|
||||
TokenKind::EqualEqual,
|
||||
Span::new(start, self.position, line, column),
|
||||
)
|
||||
}
|
||||
Some('>') => {
|
||||
self.bump();
|
||||
Token::new(
|
||||
TokenKind::FatArrow,
|
||||
Span::new(start, self.position, line, column),
|
||||
)
|
||||
}
|
||||
_ => Token::new(TokenKind::Equal, Span::new(start, self.position, line, column)),
|
||||
}
|
||||
}
|
||||
'!' => {
|
||||
self.bump();
|
||||
if self.peek() == Some('=') {
|
||||
self.bump();
|
||||
Token::new(
|
||||
TokenKind::BangEqual,
|
||||
Span::new(start, self.position, line, column),
|
||||
)
|
||||
} else {
|
||||
Token::new(TokenKind::Bang, Span::new(start, self.position, line, column))
|
||||
}
|
||||
}
|
||||
'<' => {
|
||||
self.bump();
|
||||
if self.peek() == Some('=') {
|
||||
self.bump();
|
||||
Token::new(
|
||||
TokenKind::LessEqual,
|
||||
Span::new(start, self.position, line, column),
|
||||
)
|
||||
} else {
|
||||
Token::new(TokenKind::Less, Span::new(start, self.position, line, column))
|
||||
}
|
||||
}
|
||||
'>' => {
|
||||
self.bump();
|
||||
if self.peek() == Some('=') {
|
||||
self.bump();
|
||||
Token::new(
|
||||
TokenKind::GreaterEqual,
|
||||
Span::new(start, self.position, line, column),
|
||||
)
|
||||
} else {
|
||||
Token::new(TokenKind::Greater, Span::new(start, self.position, line, column))
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
self.bump();
|
||||
Token::new(TokenKind::Newline, Span::new(start, self.position, line, column))
|
||||
}
|
||||
};
|
||||
|
||||
Some(token)
|
||||
}
|
||||
|
||||
fn consume_indentation(&mut self) -> usize {
|
||||
let mut indent = 0;
|
||||
loop {
|
||||
match self.peek() {
|
||||
Some(' ') => {
|
||||
self.bump();
|
||||
indent += 1;
|
||||
}
|
||||
Some('\t') => {
|
||||
self.bump();
|
||||
indent += 4;
|
||||
}
|
||||
Some('\n') => return 0,
|
||||
Some('#') => {
|
||||
while let Some(ch) = self.peek() {
|
||||
self.bump();
|
||||
if ch == '\n' {
|
||||
self.at_line_start = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
_ => break,
|
||||
}
|
||||
}
|
||||
indent
|
||||
}
|
||||
|
||||
fn skip_inline_whitespace(&mut self) {
|
||||
while let Some(ch) = self.peek() {
|
||||
if ch == ' ' || ch == '\t' || ch == '\r' {
|
||||
self.bump();
|
||||
continue;
|
||||
}
|
||||
|
||||
if ch == '#' {
|
||||
while let Some(comment) = self.peek() {
|
||||
self.bump();
|
||||
if comment == '\n' {
|
||||
self.at_line_start = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
fn lex_identifier(&mut self) -> Token {
|
||||
let start = self.position;
|
||||
let line = self.line;
|
||||
let column = self.column;
|
||||
let mut value = String::new();
|
||||
while let Some(ch) = self.peek() {
|
||||
if is_ident_continue(ch) {
|
||||
value.push(ch);
|
||||
self.bump();
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
let kind = match Keyword::from_ident(&value) {
|
||||
Some(keyword) => TokenKind::Keyword(keyword),
|
||||
None => TokenKind::Identifier(value),
|
||||
};
|
||||
Token::new(kind, Span::new(start, self.position, line, column))
|
||||
}
|
||||
|
||||
fn lex_number(&mut self) -> Token {
|
||||
let start = self.position;
|
||||
let line = self.line;
|
||||
let column = self.column;
|
||||
let mut value = String::new();
|
||||
while let Some(ch) = self.peek() {
|
||||
if ch.is_ascii_digit() {
|
||||
value.push(ch);
|
||||
self.bump();
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
Token::new(
|
||||
TokenKind::Integer(value),
|
||||
Span::new(start, self.position, line, column),
|
||||
)
|
||||
}
|
||||
|
||||
fn lex_string(&mut self) -> Token {
|
||||
let start = self.position;
|
||||
let line = self.line;
|
||||
let column = self.column;
|
||||
self.bump();
|
||||
let mut value = String::new();
|
||||
while let Some(ch) = self.peek() {
|
||||
self.bump();
|
||||
if ch == '"' {
|
||||
break;
|
||||
}
|
||||
value.push(ch);
|
||||
}
|
||||
Token::new(
|
||||
TokenKind::String(value),
|
||||
Span::new(start, self.position, line, column),
|
||||
)
|
||||
}
|
||||
|
||||
fn peek(&self) -> Option<char> {
|
||||
self.chars.get(self.position).copied()
|
||||
}
|
||||
|
||||
fn bump(&mut self) {
|
||||
if let Some(ch) = self.peek() {
|
||||
self.position += 1;
|
||||
if ch == '\n' {
|
||||
self.line += 1;
|
||||
self.column = 1;
|
||||
} else {
|
||||
self.column += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn make_token(&self, kind: TokenKind, start: usize, end: usize) -> Token {
|
||||
Token::new(kind, Span::new(start, end, self.line, self.column))
|
||||
}
|
||||
}
|
||||
|
||||
fn single(lexer: &mut Lexer<'_>, kind: TokenKind) -> Token {
|
||||
let start = lexer.position;
|
||||
let line = lexer.line;
|
||||
let column = lexer.column;
|
||||
lexer.bump();
|
||||
Token::new(kind, Span::new(start, lexer.position, line, column))
|
||||
}
|
||||
|
||||
fn is_ident_start(ch: char) -> bool {
|
||||
ch.is_ascii_alphabetic() || ch == '_'
|
||||
}
|
||||
|
||||
fn is_ident_continue(ch: char) -> bool {
|
||||
is_ident_start(ch) || ch.is_ascii_digit()
|
||||
}
|
||||
|
||||
10
crates/nxc-frontend/src/lib.rs
Normal file
10
crates/nxc-frontend/src/lib.rs
Normal file
@@ -0,0 +1,10 @@
|
||||
pub mod ast;
|
||||
pub mod lexer;
|
||||
pub mod parser;
|
||||
pub mod token;
|
||||
|
||||
pub use ast::Module;
|
||||
pub use lexer::Lexer;
|
||||
pub use parser::{ParseError, Parser};
|
||||
pub use token::{Keyword, Span, Token, TokenKind};
|
||||
|
||||
332
crates/nxc-frontend/src/parser.rs
Normal file
332
crates/nxc-frontend/src/parser.rs
Normal file
@@ -0,0 +1,332 @@
|
||||
use crate::ast::{
|
||||
Block, Expr, FieldDecl, FunctionDecl, Item, Module, Param, Stmt, StructDecl, TypeRef, UseDecl,
|
||||
};
|
||||
use crate::token::{Keyword, Span, Token, TokenKind};
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ParseError {
|
||||
pub message: String,
|
||||
pub span: Span,
|
||||
}
|
||||
|
||||
pub struct Parser {
|
||||
tokens: Vec<Token>,
|
||||
current: usize,
|
||||
}
|
||||
|
||||
impl Parser {
|
||||
pub fn new(tokens: Vec<Token>) -> Self {
|
||||
Self { tokens, current: 0 }
|
||||
}
|
||||
|
||||
pub fn parse_module(&mut self) -> Result<Module, ParseError> {
|
||||
let mut items = Vec::new();
|
||||
while !self.is_at_end() {
|
||||
self.skip_newlines();
|
||||
if self.is_at_end() {
|
||||
break;
|
||||
}
|
||||
items.push(self.parse_item()?);
|
||||
self.skip_newlines();
|
||||
}
|
||||
Ok(Module { items })
|
||||
}
|
||||
|
||||
fn parse_item(&mut self) -> Result<Item, ParseError> {
|
||||
if self.matches_keyword(Keyword::Use) {
|
||||
return self.parse_use().map(Item::Use);
|
||||
}
|
||||
|
||||
let is_public = self.matches_keyword(Keyword::Pub);
|
||||
let is_async = self.matches_keyword(Keyword::Async);
|
||||
|
||||
if self.matches_keyword(Keyword::Fn) {
|
||||
return self.parse_function(is_public, is_async).map(Item::Function);
|
||||
}
|
||||
|
||||
if self.matches_keyword(Keyword::Struct) {
|
||||
return self.parse_struct(is_public).map(Item::Struct);
|
||||
}
|
||||
|
||||
Err(self.error_here("expected module item"))
|
||||
}
|
||||
|
||||
fn parse_use(&mut self) -> Result<UseDecl, ParseError> {
|
||||
let start = self.previous_span();
|
||||
let mut path = Vec::new();
|
||||
path.push(self.expect_identifier()?);
|
||||
while self.matches(TokenKind::Dot) {
|
||||
path.push(self.expect_identifier()?);
|
||||
}
|
||||
Ok(UseDecl { path, span: start })
|
||||
}
|
||||
|
||||
fn parse_function(
|
||||
&mut self,
|
||||
is_public: bool,
|
||||
is_async: bool,
|
||||
) -> Result<FunctionDecl, ParseError> {
|
||||
let start = self.previous_span();
|
||||
let name = self.expect_identifier()?;
|
||||
self.expect(TokenKind::LeftParen, "expected '(' after function name")?;
|
||||
let mut params = Vec::new();
|
||||
if !self.check(&TokenKind::RightParen) {
|
||||
loop {
|
||||
let param_name = self.expect_identifier()?;
|
||||
self.expect(TokenKind::Colon, "expected ':' after parameter name")?;
|
||||
let ty = self.parse_type()?;
|
||||
params.push(Param {
|
||||
name: param_name,
|
||||
ty,
|
||||
span: start,
|
||||
});
|
||||
if !self.matches(TokenKind::Comma) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
self.expect(TokenKind::RightParen, "expected ')' after parameters")?;
|
||||
|
||||
let return_type = if self.matches(TokenKind::Arrow) {
|
||||
Some(self.parse_type()?)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
self.expect(TokenKind::Colon, "expected ':' before function body")?;
|
||||
self.skip_newlines();
|
||||
let body = self.parse_block()?;
|
||||
|
||||
Ok(FunctionDecl {
|
||||
is_public,
|
||||
is_async,
|
||||
name,
|
||||
params,
|
||||
return_type,
|
||||
body,
|
||||
span: start,
|
||||
})
|
||||
}
|
||||
|
||||
fn parse_struct(&mut self, is_public: bool) -> Result<StructDecl, ParseError> {
|
||||
let start = self.previous_span();
|
||||
let name = self.expect_identifier()?;
|
||||
self.expect(TokenKind::Colon, "expected ':' after struct name")?;
|
||||
self.skip_newlines();
|
||||
self.expect(TokenKind::Indent, "expected indented struct body")?;
|
||||
|
||||
let mut fields = Vec::new();
|
||||
while !self.check(&TokenKind::Dedent) && !self.is_at_end() {
|
||||
self.skip_newlines();
|
||||
if self.check(&TokenKind::Dedent) {
|
||||
break;
|
||||
}
|
||||
let field_name = self.expect_identifier()?;
|
||||
self.expect(TokenKind::Colon, "expected ':' after field name")?;
|
||||
let ty = self.parse_type()?;
|
||||
fields.push(FieldDecl {
|
||||
name: field_name,
|
||||
ty,
|
||||
span: start,
|
||||
});
|
||||
self.skip_newlines();
|
||||
}
|
||||
|
||||
self.expect(TokenKind::Dedent, "expected end of struct body")?;
|
||||
|
||||
Ok(StructDecl {
|
||||
is_public,
|
||||
name,
|
||||
fields,
|
||||
span: start,
|
||||
})
|
||||
}
|
||||
|
||||
fn parse_block(&mut self) -> Result<Block, ParseError> {
|
||||
self.expect(TokenKind::Indent, "expected indented block")?;
|
||||
let mut statements = Vec::new();
|
||||
|
||||
while !self.check(&TokenKind::Dedent) && !self.is_at_end() {
|
||||
self.skip_newlines();
|
||||
if self.check(&TokenKind::Dedent) {
|
||||
break;
|
||||
}
|
||||
statements.push(self.parse_statement()?);
|
||||
self.skip_newlines();
|
||||
}
|
||||
|
||||
self.expect(TokenKind::Dedent, "expected end of block")?;
|
||||
Ok(Block { statements })
|
||||
}
|
||||
|
||||
fn parse_statement(&mut self) -> Result<Stmt, ParseError> {
|
||||
if self.matches_keyword(Keyword::Let) {
|
||||
return self.parse_let(false);
|
||||
}
|
||||
if self.matches_keyword(Keyword::Var) {
|
||||
return self.parse_let(true);
|
||||
}
|
||||
if self.matches_keyword(Keyword::Return) {
|
||||
let span = self.previous_span();
|
||||
if self.check(&TokenKind::Newline) || self.check(&TokenKind::Dedent) {
|
||||
return Ok(Stmt::Return(None, span));
|
||||
}
|
||||
let expr = self.parse_expression()?;
|
||||
return Ok(Stmt::Return(Some(expr), span));
|
||||
}
|
||||
Ok(Stmt::Expr(self.parse_expression()?))
|
||||
}
|
||||
|
||||
fn parse_let(&mut self, mutable: bool) -> Result<Stmt, ParseError> {
|
||||
let span = self.previous_span();
|
||||
let name = self.expect_identifier()?;
|
||||
let ty = if self.matches(TokenKind::Colon) {
|
||||
Some(self.parse_type()?)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
self.expect(TokenKind::Equal, "expected '=' in variable declaration")?;
|
||||
let value = self.parse_expression()?;
|
||||
Ok(Stmt::Let {
|
||||
mutable,
|
||||
name,
|
||||
ty,
|
||||
value,
|
||||
span,
|
||||
})
|
||||
}
|
||||
|
||||
fn parse_expression(&mut self) -> Result<Expr, ParseError> {
|
||||
let mut expr = self.parse_primary()?;
|
||||
while self.matches(TokenKind::LeftParen) {
|
||||
let mut args = Vec::new();
|
||||
if !self.check(&TokenKind::RightParen) {
|
||||
loop {
|
||||
args.push(self.parse_expression()?);
|
||||
if !self.matches(TokenKind::Comma) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
let span = self.previous_span();
|
||||
self.expect(TokenKind::RightParen, "expected ')' after arguments")?;
|
||||
expr = Expr::Call {
|
||||
callee: Box::new(expr),
|
||||
args,
|
||||
span,
|
||||
};
|
||||
}
|
||||
Ok(expr)
|
||||
}
|
||||
|
||||
fn parse_primary(&mut self) -> Result<Expr, ParseError> {
|
||||
let token = self.advance().clone();
|
||||
match token.kind {
|
||||
TokenKind::Identifier(value) => Ok(Expr::Identifier(value, token.span)),
|
||||
TokenKind::Integer(value) => {
|
||||
let parsed = value.parse::<i64>().unwrap_or_default();
|
||||
Ok(Expr::Integer(parsed, token.span))
|
||||
}
|
||||
TokenKind::String(value) => Ok(Expr::String(value, token.span)),
|
||||
_ => Err(ParseError {
|
||||
message: "expected expression".to_string(),
|
||||
span: token.span,
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_type(&mut self) -> Result<TypeRef, ParseError> {
|
||||
let token = self.advance().clone();
|
||||
match token.kind {
|
||||
TokenKind::Identifier(name) => Ok(TypeRef {
|
||||
name,
|
||||
span: token.span,
|
||||
}),
|
||||
_ => Err(ParseError {
|
||||
message: "expected type name".to_string(),
|
||||
span: token.span,
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
fn expect_identifier(&mut self) -> Result<String, ParseError> {
|
||||
let token = self.advance().clone();
|
||||
match token.kind {
|
||||
TokenKind::Identifier(name) => Ok(name),
|
||||
_ => Err(ParseError {
|
||||
message: "expected identifier".to_string(),
|
||||
span: token.span,
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
fn expect(&mut self, kind: TokenKind, message: &str) -> Result<(), ParseError> {
|
||||
if self.matches(kind) {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(self.error_here(message))
|
||||
}
|
||||
}
|
||||
|
||||
fn matches_keyword(&mut self, keyword: Keyword) -> bool {
|
||||
if matches!(self.peek().kind, TokenKind::Keyword(found) if found == keyword) {
|
||||
self.advance();
|
||||
return true;
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
fn matches(&mut self, kind: TokenKind) -> bool {
|
||||
if self.check(&kind) {
|
||||
self.advance();
|
||||
return true;
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
fn check(&self, kind: &TokenKind) -> bool {
|
||||
if self.is_at_end() {
|
||||
return matches!(kind, TokenKind::Eof);
|
||||
}
|
||||
same_variant(&self.peek().kind, kind)
|
||||
}
|
||||
|
||||
fn skip_newlines(&mut self) {
|
||||
while self.matches(TokenKind::Newline) {}
|
||||
}
|
||||
|
||||
fn is_at_end(&self) -> bool {
|
||||
matches!(self.peek().kind, TokenKind::Eof)
|
||||
}
|
||||
|
||||
fn peek(&self) -> &Token {
|
||||
&self.tokens[self.current]
|
||||
}
|
||||
|
||||
fn advance(&mut self) -> &Token {
|
||||
if !self.is_at_end() {
|
||||
self.current += 1;
|
||||
}
|
||||
&self.tokens[self.current.saturating_sub(1)]
|
||||
}
|
||||
|
||||
fn previous_span(&self) -> Span {
|
||||
if self.current == 0 {
|
||||
Span::default()
|
||||
} else {
|
||||
self.tokens[self.current - 1].span
|
||||
}
|
||||
}
|
||||
|
||||
fn error_here(&self, message: &str) -> ParseError {
|
||||
ParseError {
|
||||
message: message.to_string(),
|
||||
span: self.peek().span,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn same_variant(left: &TokenKind, right: &TokenKind) -> bool {
|
||||
std::mem::discriminant(left) == std::mem::discriminant(right)
|
||||
}
|
||||
|
||||
126
crates/nxc-frontend/src/token.rs
Normal file
126
crates/nxc-frontend/src/token.rs
Normal file
@@ -0,0 +1,126 @@
|
||||
use std::fmt;

/// Byte range plus human-readable position of a token in the source.
/// `start`/`end` index into the char stream; `line`/`column` are 1-based.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub struct Span {
    pub start: usize,
    pub end: usize,
    pub line: usize,
    pub column: usize,
}

impl Span {
    /// Builds a span from its four components.
    pub fn new(start: usize, end: usize, line: usize, column: usize) -> Self {
        Self { start, end, line, column }
    }
}

/// A lexed token: what it is plus where it came from.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Token {
    pub kind: TokenKind,
    pub span: Span,
}

impl Token {
    /// Pairs a kind with its source location.
    pub fn new(kind: TokenKind, span: Span) -> Self {
        Self { kind, span }
    }
}

/// Every kind of token the lexer can emit. Literal-carrying variants keep
/// their text; `Indent`/`Dedent`/`Newline` encode the layout structure.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum TokenKind {
    Identifier(String),
    Integer(String),
    String(String),
    Keyword(Keyword),
    LeftParen,
    RightParen,
    LeftBrace,
    RightBrace,
    LeftBracket,
    RightBracket,
    Comma,
    Dot,
    Colon,
    Arrow,
    FatArrow,
    Plus,
    Minus,
    Star,
    Slash,
    Percent,
    Equal,
    EqualEqual,
    Bang,
    BangEqual,
    Less,
    LessEqual,
    Greater,
    GreaterEqual,
    Question,
    Newline,
    Indent,
    Dedent,
    Eof,
}

/// The language's reserved words.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Keyword {
    Async,
    Else,
    Fn,
    For,
    If,
    Impl,
    Import,
    In,
    Let,
    Match,
    Pub,
    Return,
    Struct,
    Use,
    Var,
    While,
}

impl Keyword {
    /// Maps identifier text to its keyword, or `None` if it is an
    /// ordinary identifier.
    pub fn from_ident(value: &str) -> Option<Self> {
        Some(match value {
            "async" => Self::Async,
            "else" => Self::Else,
            "fn" => Self::Fn,
            "for" => Self::For,
            "if" => Self::If,
            "impl" => Self::Impl,
            "import" => Self::Import,
            "in" => Self::In,
            "let" => Self::Let,
            "match" => Self::Match,
            "pub" => Self::Pub,
            "return" => Self::Return,
            "struct" => Self::Struct,
            "use" => Self::Use,
            "var" => Self::Var,
            "while" => Self::While,
            _ => return None,
        })
    }
}

impl fmt::Display for TokenKind {
    /// Human-readable rendering for diagnostics: literal-carrying variants
    /// show their payload, everything else falls back to the Debug name.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::Identifier(name) => write!(f, "identifier({name})"),
            Self::Integer(value) => write!(f, "integer({value})"),
            Self::String(value) => write!(f, "string({value})"),
            Self::Keyword(keyword) => write!(f, "keyword({keyword:?})"),
            other => write!(f, "{other:?}"),
        }
    }
}
|
||||
|
||||
10
crates/nxc-runtime/Cargo.toml
Normal file
10
crates/nxc-runtime/Cargo.toml
Normal file
@@ -0,0 +1,10 @@
|
||||
[package]
|
||||
name = "nxc-runtime"
|
||||
version.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
authors.workspace = true
|
||||
|
||||
[lib]
|
||||
path = "src/lib.rs"
|
||||
|
||||
10
crates/nxc-runtime/src/lib.rs
Normal file
10
crates/nxc-runtime/src/lib.rs
Normal file
@@ -0,0 +1,10 @@
|
||||
// NexaCore runtime scaffolding.
//
// Planned components (not yet implemented):
// - async scheduler
// - string and collection runtime
// - HTTP primitives
// - PostgreSQL client ABI

/// Version string the runtime reports. Kept in sync by hand with the
/// workspace `version` in Cargo.toml for now.
pub const RUNTIME_VERSION: &str = "0.1.0";
||||
|
||||
557
docs/nexacore-foundation.md
Normal file
557
docs/nexacore-foundation.md
Normal file
@@ -0,0 +1,557 @@
|
||||
# NexaCore Foundation
|
||||
|
||||
## 1. Language Vision
|
||||
|
||||
### What NexaCore is
|
||||
|
||||
NexaCore is a compiled backend language designed for APIs, database-heavy services, internal platforms, and system daemons. The language aims to keep code visually simple and readable while enforcing stronger correctness guarantees than Python. It prioritizes predictable performance, structured concurrency, explicit error handling, and batteries-included backend tooling.
|
||||
|
||||
### Target users
|
||||
|
||||
- backend engineers building REST APIs and service layers
|
||||
- platform teams building internal tools and service orchestration
|
||||
- companies replacing Python microservices that have grown too dynamic or too slow
|
||||
- teams that want a simpler language than Rust for application-level backend work
|
||||
|
||||
### Why it is better suited for backend systems than Python
|
||||
|
||||
- compiled deployment artifact instead of shipping source trees and interpreter environments
|
||||
- static typing with local inference catches failures before production
|
||||
- explicit error model improves reliability for service code
|
||||
- structured async runtime designed around network and database workloads
|
||||
- first-class PostgreSQL and HTTP support as standard capabilities, not bolted-on frameworks
|
||||
- smaller operational surface for packaging, startup, and deployment
|
||||
- stronger encapsulation: shipped binaries are materially harder to read than plain source code
|
||||
|
||||
### Design goals
|
||||
|
||||
- keep syntax approachable and easy to scan
|
||||
- optimize for backend productivity, not language cleverness
|
||||
- compile to efficient deployable artifacts
|
||||
- make async IO, HTTP routing, and PostgreSQL first-class
|
||||
- provide strong type safety with low annotation burden
|
||||
- support Linux first with a path to Windows later
|
||||
- keep tooling simple: new, build, run, test, fmt, add, doc
|
||||
|
||||
### Non-goals
|
||||
|
||||
- replacing C or Rust for kernel, driver, or embedded programming
|
||||
- full zero-cost manual memory control in the MVP
|
||||
- metaprogramming-heavy language features in version one
|
||||
- multiple inheritance, operator overloading, or macros in the MVP
|
||||
- universal frontend or browser runtime in the MVP
|
||||
|
||||
## 2. Language Features
|
||||
|
||||
### MVP feature set
|
||||
|
||||
- Variables: immutable by default with `let`, mutable with `var`
|
||||
- Functions: named functions with return types and local type inference
|
||||
- Structs: product types with methods and visibility control
|
||||
- Modules/imports: file-based modules with package namespaces
|
||||
- If/else: expression-friendly branching
|
||||
- Match: exhaustive matching on enums, literals, and guards
|
||||
- Loops: `for`, `while`, and iterator-based traversal
|
||||
- Error handling: `Result<T, E>`, `?` propagation, `defer` later
|
||||
- Async/await: structured async for network and database operations
|
||||
- Database access: typed query APIs and row-to-struct mapping
|
||||
- HTTP API: built-in routing and request/response abstractions in stdlib
|
||||
- Package management: first-party package manifest and lockfile
|
||||
|
||||
### Typing model
|
||||
|
||||
Static typing with local inference is the right MVP choice. NexaCore should infer obvious local types while requiring type signatures on public functions, struct fields, and externally visible module boundaries. This gives Python-like authoring speed without Python’s runtime ambiguity.
|
||||
|
||||
### Memory model
|
||||
|
||||
The MVP should use automatic memory management through reference-counted heap objects plus arena ownership inside the compiler and runtime internals. This is simpler than full tracing GC and easier to implement safely than Rust-like borrow checking in a new language. Long term, the language can evolve toward region-based optimization and escape analysis.
|
||||
|
||||
### Concurrency model
|
||||
|
||||
Structured async concurrency for IO-bound backend work is the default. The language runtime owns the async scheduler and task model. Shared-state threads are not part of the first language surface; background workers and task spawning go through runtime primitives.
|
||||
|
||||
## 3. Syntax Design
|
||||
|
||||
NexaCore should be indentation-aware for readability but use explicit block starters so the parser remains simple and code stays visually structured. A colon starts a block, and a return to the enclosing indentation level ends it.
|
||||
|
||||
### Hello world
|
||||
|
||||
```nexa
|
||||
fn main() -> Int:
|
||||
print("Hello, NexaCore")
|
||||
```
|
||||
|
||||
### Variables
|
||||
|
||||
```nexa
|
||||
let host = "127.0.0.1"
|
||||
var port: Int = 8080
|
||||
let debug = true
|
||||
```
|
||||
|
||||
### Functions
|
||||
|
||||
```nexa
|
||||
fn add(a: Int, b: Int) -> Int:
|
||||
a + b
|
||||
```
|
||||
|
||||
### Structs and methods
|
||||
|
||||
```nexa
|
||||
pub struct User:
|
||||
id: Int
|
||||
email: String
|
||||
|
||||
impl User:
|
||||
fn display(self) -> String:
|
||||
"{self.id}:{self.email}"
|
||||
```
|
||||
|
||||
### REST API endpoint
|
||||
|
||||
```nexa
|
||||
use web.http.{App, Request, Response}
|
||||
|
||||
fn health(_req: Request) -> Response:
|
||||
Response.json({
|
||||
"status": "ok"
|
||||
})
|
||||
```
|
||||
|
||||
### PostgreSQL query
|
||||
|
||||
```nexa
|
||||
use db.postgres.{Pool, query}
|
||||
|
||||
async fn load_user(pool: Pool, id: Int) -> Result<User, DbError>:
|
||||
let row = await query(pool,
|
||||
"select id, email from users where id = $1",
|
||||
[id]
|
||||
)?.one()
|
||||
|
||||
row.into<User>()
|
||||
```
|
||||
|
||||
### Async function
|
||||
|
||||
```nexa
|
||||
async fn fetch_profile(user_id: Int) -> Result<Profile, AppError>:
|
||||
let profile = await profiles.load(user_id)?
|
||||
profile
|
||||
```
|
||||
|
||||
### Error handling
|
||||
|
||||
```nexa
|
||||
fn parse_port(raw: String) -> Result<Int, ConfigError>:
|
||||
match raw.to_int():
|
||||
ok(value) => value
|
||||
err(_) => fail ConfigError.invalid("PORT must be numeric")
|
||||
```
|
||||
|
||||
### Imports and modules
|
||||
|
||||
```nexa
|
||||
use core.env
|
||||
use web.http.{App, Response}
|
||||
use db.postgres.Pool
|
||||
```
|
||||
|
||||
## 4. Technical Architecture
|
||||
|
||||
### Compiler pipeline
|
||||
|
||||
1. Lexer
|
||||
Converts UTF-8 source into tokens, including indentation-sensitive block tokens.
|
||||
2. Parser
|
||||
Builds an AST from tokens using a recursive descent parser.
|
||||
3. AST
|
||||
Stores module declarations, items, statements, expressions, types, and spans.
|
||||
4. Semantic analyzer
|
||||
Resolves names, module symbols, scopes, and visibility.
|
||||
5. Type checker
|
||||
Infers local types, validates function signatures, and resolves generic instantiations.
|
||||
6. HIR and MIR
|
||||
HIR for resolved source-level structure, MIR for lowered control flow and typed operations.
|
||||
7. Backend code generation
|
||||
Emit portable C in the MVP, then compile with a system C compiler.
|
||||
8. Binary output
|
||||
Native executable or shared object linked with the NexaCore runtime.
|
||||
|
||||
### Module system
|
||||
|
||||
- one package contains a `nexa.toml` manifest
|
||||
- source files live in `src/`
|
||||
- `src/main.nx` builds an application binary
|
||||
- `src/lib.nx` builds a library package
|
||||
- `use` imports symbol paths
|
||||
- package dependencies resolve from local paths first; resolution through a first-party registry follows later
|
||||
|
||||
### Package manager
|
||||
|
||||
The `nexacore` CLI owns package management:
|
||||
|
||||
- `nexacore new api-service`
|
||||
- `nexacore add postgres`
|
||||
- `nexacore build`
|
||||
- `nexacore test`
|
||||
|
||||
Manifest:
|
||||
|
||||
```toml
|
||||
[package]
|
||||
name = "orders-api"
|
||||
version = "0.1.0"
|
||||
edition = "2026"
|
||||
|
||||
[dependencies]
|
||||
postgres = "0.1"
|
||||
http = "0.1"
|
||||
```
|
||||
|
||||
### Standard library layout
|
||||
|
||||
- `core`: strings, collections, io, env, time, result, option
|
||||
- `async`: tasks, channels, timers
|
||||
- `web`: http server, routing, requests, responses, middleware
|
||||
- `db`: postgres client, pooling, migrations later
|
||||
- `json`: encode, decode, schema helpers
|
||||
- `auth`: jwt and password utilities
|
||||
- `log`: structured logging
|
||||
|
||||
### Best MVP implementation path
|
||||
|
||||
The best MVP path is `NexaCore -> AST/HIR/MIR -> C -> native binary`.
|
||||
|
||||
### Why C is the best first backend
|
||||
|
||||
- easier to implement than a full LLVM backend
|
||||
- produces native binaries immediately
|
||||
- lets the team focus first on language semantics, standard library shape, and runtime
|
||||
- easier to debug generated output during compiler bring-up
|
||||
- keeps a clean migration path to LLVM or a direct machine-code backend later
|
||||
- avoids the operational and implementation overhead of designing a serious VM before validating the language
|
||||
|
||||
### Why not LLVM first
|
||||
|
||||
LLVM is powerful, but it significantly increases implementation surface area early. For an MVP language team, front-end maturity and runtime design are bigger risks than instruction selection.
|
||||
|
||||
### Why not a bytecode VM first
|
||||
|
||||
A VM is attractive for portability, but it weakens the deployment and code-protection story and requires designing both a language and a production runtime execution engine at once.
|
||||
|
||||
## 5. Security and Code Protection
|
||||
|
||||
No compiled format is impossible to reverse engineer. NexaCore should aim for strong practical resistance, not absolute secrecy.
|
||||
|
||||
### Realistic protection model
|
||||
|
||||
- compile to native binaries for deployment
|
||||
- strip symbols in release mode
|
||||
- minimize embedded reflection metadata
|
||||
- avoid preserving source-like names unless needed for diagnostics
|
||||
- separate debug symbols from production artifacts
|
||||
- support link-time optimization and dead-code elimination
|
||||
- optionally obfuscate private symbol names in hardened builds
|
||||
- keep secrets out of binaries; load them from environment or secret managers
|
||||
|
||||
### Tradeoffs
|
||||
|
||||
- native binaries are materially harder to inspect than source, but still reversible with enough effort
|
||||
- bytecode is easier to decompile than optimized native code
|
||||
- aggressive obfuscation complicates debugging and incident response
|
||||
- encrypted assets help with packaged resources, not code secrecy after runtime decryption
|
||||
|
||||
### Recommended release modes
|
||||
|
||||
- `debug`: symbols and source maps kept
|
||||
- `release`: optimized and stripped
|
||||
- `release-hardened`: stripped, symbol-minimized, optional control-flow obfuscation hooks later
|
||||
|
||||
## 6. Web Backend Standard Library
|
||||
|
||||
### Core backend SDK modules
|
||||
|
||||
- HTTP server: TCP listener, HTTP parser integration, request lifecycle
|
||||
- Routing: method/path routing, path params, nested groups
|
||||
- Middleware: auth, logging, recovery, tracing
|
||||
- JSON: serializer and parser with typed model mapping
|
||||
- Environment config: `.env` loading later, env parsing, typed config helpers
|
||||
- PostgreSQL driver: async client, pool, prepared statements
|
||||
- Logging: structured logger with JSON output option
|
||||
- File handling: streams, safe path utilities, upload helpers later
|
||||
- JWT/auth helpers: token signing, verification, password hashing later
|
||||
- Background jobs: runtime task spawning, scheduling, queues later
|
||||
- WebSocket: defer until after HTTP core is stable
|
||||
|
||||
## 7. PostgreSQL Integration
|
||||
|
||||
NexaCore should treat PostgreSQL as a first-class backend primitive. The syntax stays explicit, but the API should be much tighter than Python ORMs and less ceremony-heavy than many async driver stacks.
|
||||
|
||||
### Opening a connection
|
||||
|
||||
```nexa
|
||||
use db.postgres.Pool
|
||||
|
||||
let pool = Pool.connect(env.require("DATABASE_URL"), max: 20)?
|
||||
```
|
||||
|
||||
### Select queries
|
||||
|
||||
```nexa
|
||||
let users = await pool.query<User>(
|
||||
"select id, email from users order by id"
|
||||
)?
|
||||
```
|
||||
|
||||
### Inserts and updates
|
||||
|
||||
```nexa
|
||||
let inserted = await pool.exec(
|
||||
"insert into users(email) values($1)",
|
||||
["a@example.com"]
|
||||
)?
|
||||
```
|
||||
|
||||
### Transactions
|
||||
|
||||
```nexa
|
||||
let tx = await pool.begin()?
|
||||
await tx.exec("update accounts set balance = balance - $1 where id = $2", [10, from])?
|
||||
await tx.exec("update accounts set balance = balance + $1 where id = $2", [10, to])?
|
||||
await tx.commit()?
|
||||
```
|
||||
|
||||
### Mapping rows to structs
|
||||
|
||||
```nexa
|
||||
struct User:
|
||||
id: Int
|
||||
email: String
|
||||
|
||||
let user = await pool.query_one<User>(
|
||||
"select id, email from users where id = $1",
|
||||
[id]
|
||||
)?
|
||||
```
|
||||
|
||||
### Connection pooling
|
||||
|
||||
```nexa
|
||||
let pool = Pool.connect(url, max: 32, min: 4, idle_timeout_sec: 30)?
|
||||
```
|
||||
|
||||
### Async queries
|
||||
|
||||
All PostgreSQL APIs are async-first. Blocking database access is not part of the standard application path.
|
||||
|
||||
## 8. Developer Experience
|
||||
|
||||
### CLI commands
|
||||
|
||||
- `nexacore new <name>`: create a new app or library
|
||||
- `nexacore build`: compile package to binary or library
|
||||
- `nexacore run`: build and execute
|
||||
- `nexacore test`: run language and package tests
|
||||
- `nexacore fmt`: format source code
|
||||
- `nexacore add <package>`: add dependency
|
||||
- `nexacore doc`: build documentation
|
||||
|
||||
### Example backend project layout
|
||||
|
||||
```text
|
||||
orders-api/
|
||||
nexa.toml
|
||||
src/
|
||||
main.nx
|
||||
api/
|
||||
routes.nx
|
||||
users.nx
|
||||
db/
|
||||
models.nx
|
||||
queries.nx
|
||||
config.nx
|
||||
tests/
|
||||
users_test.nx
|
||||
```
|
||||
|
||||
## 9. Starter Implementation Plan
|
||||
|
||||
### Phase 1: language spec MVP
|
||||
|
||||
- freeze core syntax rules
|
||||
- define token grammar and block structure
|
||||
- define AST and type system MVP
|
||||
- define package manifest format
|
||||
|
||||
### Phase 2: lexer/parser/AST
|
||||
|
||||
- implement token definitions
|
||||
- implement indentation-aware lexer
|
||||
- implement parser for modules, functions, structs, statements, and expressions
|
||||
- snapshot parser test fixtures
|
||||
|
||||
### Phase 3: semantic analysis
|
||||
|
||||
- name resolution
|
||||
- scope tracking
|
||||
- visibility rules
|
||||
- type inference for locals
|
||||
- public API type validation
|
||||
|
||||
### Phase 4: code generation
|
||||
|
||||
- HIR and MIR lowering
|
||||
- C backend emitter
|
||||
- runtime ABI definition
|
||||
- compile driver invoking system C compiler
|
||||
|
||||
### Phase 5: runtime and stdlib
|
||||
|
||||
- string and collection runtime
|
||||
- result/option representations
|
||||
- async task scheduler
|
||||
- IO primitives
|
||||
|
||||
### Phase 6: PostgreSQL and HTTP framework
|
||||
|
||||
- async socket and HTTP runtime
|
||||
- routing and JSON helpers
|
||||
- PostgreSQL client and connection pooling
|
||||
- example backend service
|
||||
|
||||
### Phase 7: package manager and tooling
|
||||
|
||||
- manifest parser
|
||||
- dependency resolver
|
||||
- formatter
|
||||
- test runner
|
||||
- docs generator
|
||||
|
||||
## 10. Repository Structure
|
||||
|
||||
```text
|
||||
NexaCore/
|
||||
Cargo.toml
|
||||
README.md
|
||||
docs/
|
||||
nexacore-foundation.md
|
||||
crates/
|
||||
nxc-cli/
|
||||
nxc-driver/
|
||||
nxc-frontend/
|
||||
nxc-runtime/
|
||||
stdlib/
|
||||
core/
|
||||
db/
|
||||
http/
|
||||
packages/
|
||||
examples/
|
||||
backend-api/
|
||||
tests/
|
||||
compiler/
|
||||
integration/
|
||||
tools/
|
||||
```
|
||||
|
||||
## 11. MVP Code Generation
|
||||
|
||||
Bootstrapping in Rust is the best choice:
|
||||
|
||||
- excellent fit for compiler engineering
|
||||
- strong enums and pattern matching for token/AST modeling
|
||||
- memory safety for a long-lived systems project
|
||||
- good ecosystem for CLI, testing, and later LLVM/C toolchain integrations
|
||||
|
||||
The starter code in this repo includes:
|
||||
|
||||
- token definitions
|
||||
- lexer
|
||||
- AST nodes
|
||||
- parser skeleton
|
||||
- compiler driver
|
||||
- CLI entrypoint
|
||||
|
||||
## 12. Example NexaCore Program
|
||||
|
||||
```nexa
|
||||
use core.env
|
||||
use db.postgres.Pool
|
||||
use web.http.{App, Response}
|
||||
|
||||
struct AppState:
|
||||
pool: Pool
|
||||
|
||||
async fn health(state: AppState) -> Response:
|
||||
let version = env.get("APP_VERSION").or("dev")
|
||||
let row = await state.pool.query_one<Map>(
|
||||
"select now() as now"
|
||||
)?
|
||||
|
||||
Response.json({
|
||||
"status": "ok",
|
||||
"version": version,
|
||||
"database_time": row["now"]
|
||||
})
|
||||
|
||||
async fn main() -> Result<Void, AppError>:
|
||||
let database_url = env.require("DATABASE_URL")?
|
||||
let port = env.get("PORT").or("8080").to_int()?
|
||||
let pool = Pool.connect(database_url, max: 16)?
|
||||
|
||||
let app = App.new()
|
||||
.state(AppState { pool: pool })
|
||||
.get("/health", health)
|
||||
|
||||
await app.listen("0.0.0.0", port)?
|
||||
```
|
||||
|
||||
## 13. Codex Execution Rules
|
||||
|
||||
- prioritize correctness over false completeness
|
||||
- implement compileable starter code, not pseudocode disguised as finished work
|
||||
- leave clear TODOs where the compiler is intentionally incomplete
|
||||
- keep package boundaries aligned with the compiler pipeline
|
||||
- avoid inventing third-party dependencies unless they are explicitly added
|
||||
|
||||
## 14. Final Deliverable
|
||||
|
||||
### Recommended architecture choice
|
||||
|
||||
Rust front-end compiler with a C backend for the MVP.
|
||||
|
||||
### MVP scope
|
||||
|
||||
- parser and type-checked core language
|
||||
- C code generation
|
||||
- native binary build flow on Linux
|
||||
- minimal runtime
|
||||
- first-party HTTP and PostgreSQL runtime modules
|
||||
|
||||
### First coding step
|
||||
|
||||
Build the front-end pipeline end to end for a single file:
|
||||
lex -> parse -> AST dump -> diagnostics.
|
||||
|
||||
### First files to generate
|
||||
|
||||
- workspace manifest
|
||||
- `nxc-frontend` token/lexer/parser/AST
|
||||
- `nxc-driver` compile pipeline
|
||||
- `nxc-cli` entrypoint
|
||||
- example `main.nx`
|
||||
|
||||
### Build order
|
||||
|
||||
1. tokens and spans
|
||||
2. lexer
|
||||
3. AST
|
||||
4. parser
|
||||
5. diagnostics
|
||||
6. semantic resolver
|
||||
7. type checker
|
||||
8. HIR/MIR lowering
|
||||
9. C backend
|
||||
10. runtime and stdlib
|
||||
|
||||
30
examples/backend-api/main.nx
Normal file
30
examples/backend-api/main.nx
Normal file
@@ -0,0 +1,30 @@
|
||||
use core.env
|
||||
use db.postgres.Pool
|
||||
use web.http.{App, Response}
|
||||
|
||||
struct AppState:
|
||||
pool: Pool
|
||||
|
||||
async fn health(state: AppState) -> Response:
|
||||
let version = env.get("APP_VERSION").or("dev")
|
||||
let row = await state.pool.query_one<Map>(
|
||||
"select now() as now"
|
||||
)?
|
||||
|
||||
Response.json({
|
||||
"status": "ok",
|
||||
"version": version,
|
||||
"database_time": row["now"]
|
||||
})
|
||||
|
||||
async fn main() -> Result<Void, AppError>:
|
||||
let database_url = env.require("DATABASE_URL")?
|
||||
let port = env.get("PORT").or("8080").to_int()?
|
||||
let pool = Pool.connect(database_url, max: 16)?
|
||||
|
||||
let app = App.new()
|
||||
.state(AppState { pool: pool })
|
||||
.get("/health", health)
|
||||
|
||||
await app.listen("0.0.0.0", port)?
|
||||
|
||||
Reference in New Issue
Block a user