Add sketch file; change tokenize return type to Result.

This commit is contained in:
Jesse Brault 2024-11-24 23:02:45 -06:00
parent 88119e3001
commit 07e0e2ae73
2 changed files with 188 additions and 13 deletions

174
sketching/sketch.dm Normal file
View File

@ -0,0 +1,174 @@
// Sketch of a train domain model in the "dm" language being designed.
// NOTE(review): this is design-sketch code, not a compilable program;
// comments describe apparent intent to be confirmed as the language evolves.
ns nonavosa_rail
pub mod train {
// Interface describing a train: data properties plus two abstract methods.
pub int Train {
name: String
primary_color: Color
destination: String
// Returns IO — presumably an effect wrapper; see io.dm's IO enum. TODO confirm.
fn blow_horn(): IO
fn set_volume_by(factor: Number)
}
pub enum Color {
White, Blue, Red
}
// `fn int` appears to declare a functional (single-call) interface.
pub fn int Printer {
(message: String): IO
}
// Factory: spreads the caller's props into a SimpleTrain and seeds volume at 80.0.
pub fn create(props: Props<Train> & { printer: Printer }): Train = SimpleTrain { ...props } with { volume = 80.0 }
// Abstract base impl contributing a volume field with generated get/set accessors.
abs impl : Train {
#get #set
fld volume: Double
}
impl SimpleTrain : AbstractTrain {
printer: Printer
// Overrides the generated setter to enforce the 0-100 bound.
override fn set_volume(volume) {
if (volume < 0 || volume > 100) {
throw IllegalArgumentException {
message: `Volume $volume is outside of bounds 0-100.`,
argument: volume
}
}
this.volume = volume
}
// `->` seems to chain a follow-up effect after the printer call — TODO confirm semantics.
impl fn blow_horn() = printer(`Train named $name is blowing it's horn at volume $volume!`)
-> { s => s.append('Hello from SimpleTrain!') }
impl fn set_volume_by(factor) = set_volume(volume * factor)
}
}
// Demo entry point: builds a train via the factory, then an ad-hoc anonymous
// implementation of the Train interface with inline method bodies.
fn main(args) {
let train = train::create {
name: 'Test Train',
primary_color: train::Color::Blue,
destination: 'Nonavosa Central',
// Lambda satisfies the functional Printer interface.
printer: { msg => println(msg) }
}
train.set_volume_by(1.1)
train.blow_horn()
// Anonymous implementation: properties plus inline `impl fn` bodies.
let anonymous_train = train::Train {
name: 'Anonymous Train',
primary_color: train::Color::Red,
destination: 'Nonavosa North',
impl fn blow_horn() = println('Choo Choo!')
impl fn set_volume_by = throw MethodNotSupported
}
try {
anonymous_train.set_volume_by(3.0)
} catch (e: MethodNotSupported) {
println e.message
}
// `->` chains a continuation to run after the call — TODO confirm semantics.
anonymous_train.blow_horn() -> { println('After blow horn, assuming nothing went wrong before') }
}
// some other file
// A named function that also implements the Printer functional interface:
// appends each message to log.txt via the io module's chained-IO style.
fn log_printer(message) impl nonavosa_rail::train::Printer = msg =>
io::open_file('log.txt', io::Mode::Append) ->
{ f => f.append(msg) } ->
io::close_file
// Wires the file-backed printer into the demo's entry point.
fn main() {
nonavosa_rail::main(log_printer)
}
// io.dm
// IO module sketch. `decl extern` apparently declares host-provided functions;
// #target[lua] restricts a declaration to the Lua compilation target — TODO confirm.
ns io
decl extern fn println(s: String)
#target[lua]
decl extern fn _open_file(filename: String, mode: String): LuaTable
#target[lua]
// Opens a file through the Lua runtime and wraps the handle in an IO result:
// Success carrying a LuaFile on success, Failure carrying the LuaError otherwise.
// NOTE(review): the original interpolation read `$(unknown)`, which matches
// neither the `${...}` splice syntax used everywhere else in this file nor any
// name in scope; the `filename` parameter is clearly what was meant.
pub fn open_file(filename: String, mode: Mode): IO<File, LuaError> {
try {
let lua_file_handle = lua {% fs.open(${filename}, ${mode.as_lua_string()}) %}
Success(LuaFile { lua_file_handle })
} catch (e: LuaError) {
Failure(e)
}
}
#target[lua]
// Closes a file handle, delegating to the stream's own close(); the result is
// presumably the IO value returned by close() — TODO confirm `impl` in return position.
pub fn close_file(file): impl IO<File, LuaError> = file.close()
// File is a marker interface refining Stream.
pub int File : Stream
#target[lua]
// Lua-backed File: wraps a raw Lua file handle and translates LuaError into
// the Failure variant of IO.
impl LuaFile : File {
#get
fld lua_file_handle
// Appends data via the Lua handle; Success(self) allows further chaining.
impl fn append(data) {
try {
lua {% ${lua_file_handle}.write(${data}) %}
Success(self)
} catch (e: LuaError) {
Failure(e)
}
}
// Closes the Lua handle; bare Success presumably carries no stream — TODO confirm.
impl fn close() {
try {
lua {% ${lua_file_handle}.close() %}
Success
} catch (e: LuaError) {
Failure(e)
}
}
}
// File-open modes passed to open_file; mapped to Lua mode strings via
// as_lua_string() (referenced in open_file).
pub enum Mode {
Append, Write, Read
}
// Result-like effect type with variant bodies: Success optionally carries a
// stream, Failure carries an error. Implements Chainable so `->` can thread
// IO values through a pipeline; Failure short-circuits by returning itself.
pub enum IO<S: Stream = Stream, E?> : Chainable<IO<S, E>> {
Success(stream?: S) {
// Invoking a Success runs the next step, converting a thrown E into Failure.
impl fn call() {
try {
do_call(stream)
} catch (e: E) {
Failure(e)
}
}
},
// Invoking a Failure is a no-op: the error propagates unchanged.
Failure(_) {
impl fn call() = self
};
// Abstract hook each chain step supplies — presumably bound by the `->`
// operator's right-hand side; TODO confirm.
fn <N: Stream> do_call(stream: S): IO<N, E>
}
// Minimal writable-stream contract used by File and the IO enum above.
pub int Stream {
fn append(data: String): IO<Self>
fn close(): IO
}
// chain.dm
// Functional interface backing the `->` chaining operator: a callable that
// produces the next value N in a pipeline.
pub fn int Chainable<N> {
(): N
}

View File

@ -31,7 +31,7 @@ pub enum Token {
Abstract Abstract
} }
pub fn tokenize(input: &String) -> Vec<Token> { pub fn tokenize(input: &String) -> Result<Vec<Token>, &'static str> {
let mut tokens: Vec<Token> = Vec::new(); let mut tokens: Vec<Token> = Vec::new();
let mut peekable = input.chars().peekable(); let mut peekable = input.chars().peekable();
while let Some(c) = peekable.next() { while let Some(c) = peekable.next() {
@ -80,7 +80,7 @@ pub fn tokenize(input: &String) -> Vec<Token> {
match count { match count {
1 => tokens.push(Token::Dot), 1 => tokens.push(Token::Dot),
3 => tokens.push(Token::Ellipsis), 3 => tokens.push(Token::Ellipsis),
_ => panic!("Too many dots.") _ => return Err("Unexpected number of tokens after '.'"),
} }
} }
_ => { _ => {
@ -90,7 +90,7 @@ pub fn tokenize(input: &String) -> Vec<Token> {
} }
} }
} }
tokens Ok(tokens)
} }
fn match_identifier_or_keyword(start_char: char, peekable: &mut Peekable<Chars>) -> Option<Token> { fn match_identifier_or_keyword(start_char: char, peekable: &mut Peekable<Chars>) -> Option<Token> {
@ -143,7 +143,7 @@ mod tests {
#[test] #[test]
fn simple_ns() { fn simple_ns() {
let result = tokenize(&String::from("ns simple")); let result = tokenize(&String::from("ns simple")).unwrap();
assert_eq!(Token::Namespace, result[0]); assert_eq!(Token::Namespace, result[0]);
assert_eq!(Token::Identifier(String::from("simple")), result[1]); assert_eq!(Token::Identifier(String::from("simple")), result[1]);
} }
@ -153,14 +153,14 @@ mod tests {
let mut src_file = File::open(Path::new("test-data/lexer/simple_ns.dm")).unwrap(); let mut src_file = File::open(Path::new("test-data/lexer/simple_ns.dm")).unwrap();
let mut src = String::new(); let mut src = String::new();
let _ = src_file.read_to_string(&mut src); let _ = src_file.read_to_string(&mut src);
let result = tokenize(&src); let result = tokenize(&src).unwrap();
assert_eq!(Token::Namespace, result[0]); assert_eq!(Token::Namespace, result[0]);
assert_eq!(Token::Identifier(String::from("simple")), result[1]); assert_eq!(Token::Identifier(String::from("simple")), result[1]);
} }
#[test] #[test]
fn pub_mod_simple() { fn pub_mod_simple() {
let result = tokenize(&String::from("pub mod simple")); let result = tokenize(&String::from("pub mod simple")).unwrap();
assert_eq!(Token::Public, result[0]); assert_eq!(Token::Public, result[0]);
assert_eq!(Token::Module, result[1]); assert_eq!(Token::Module, result[1]);
assert_eq!(Token::Identifier(String::from("simple")), result[2]); assert_eq!(Token::Identifier(String::from("simple")), result[2]);
@ -168,21 +168,22 @@ mod tests {
#[test] #[test]
fn curly_open_and_close() { fn curly_open_and_close() {
let result = tokenize(&String::from("{ }")); let result = tokenize(&String::from("{ }")).unwrap();
assert_eq!(Token::CurlyOpen, result[0]); assert_eq!(Token::CurlyOpen, result[0]);
assert_eq!(Token::CurlyClose, result[1]); assert_eq!(Token::CurlyClose, result[1]);
} }
#[test] #[test]
fn simple_int() { fn simple_int() {
let result = tokenize(&String::from("int simple")); let result = tokenize(&String::from("int simple")).unwrap();
assert_eq!(Token::Interface, result[0]); assert_eq!(Token::Interface, result[0]);
assert_eq!(Token::Identifier(String::from("simple")), result[1]); assert_eq!(Token::Identifier(String::from("simple")), result[1]);
} }
#[test] #[test]
fn ns_pub_mod_simple() { fn ns_pub_mod_simple() {
let result = tokenize(&String::from("ns simple_ns\npub mod simple { }")); let result = tokenize(&String::from("ns simple_ns\npub mod simple { }"))
.unwrap();
assert_eq!(Token::Namespace, result[0]); assert_eq!(Token::Namespace, result[0]);
assert_eq!(Token::Identifier(String::from("simple_ns")), result[1]); assert_eq!(Token::Identifier(String::from("simple_ns")), result[1]);
assert_eq!(Token::Public, result[2]); assert_eq!(Token::Public, result[2]);
@ -194,14 +195,14 @@ mod tests {
#[test] #[test]
fn curly_open_and_close_no_space() { fn curly_open_and_close_no_space() {
let result = tokenize(&String::from("{}")); let result = tokenize(&String::from("{}")).unwrap();
assert_eq!(Token::CurlyOpen, result[0]); assert_eq!(Token::CurlyOpen, result[0]);
assert_eq!(Token::CurlyClose, result[1]); assert_eq!(Token::CurlyClose, result[1]);
} }
#[test] #[test]
fn interface_function() { fn interface_function() {
let result = tokenize(&String::from("fn test(): Test")); let result = tokenize(&String::from("fn test(): Test")).unwrap();
assert_eq!(Token::Function, result[0]); assert_eq!(Token::Function, result[0]);
assert_eq!(Token::Identifier(String::from("test")), result[1]); assert_eq!(Token::Identifier(String::from("test")), result[1]);
assert_eq!(Token::ParenOpen, result[2]); assert_eq!(Token::ParenOpen, result[2]);
@ -212,7 +213,7 @@ mod tests {
#[test] #[test]
fn interface_prop() { fn interface_prop() {
let result = tokenize(&String::from("test: Test")); let result = tokenize(&String::from("test: Test")).unwrap();
assert_eq!(Token::Identifier(String::from("test")), result[0]); assert_eq!(Token::Identifier(String::from("test")), result[0]);
assert_eq!(Token::Colon, result[1]); assert_eq!(Token::Colon, result[1]);
assert_eq!(Token::Identifier(String::from("Test")), result[2]); assert_eq!(Token::Identifier(String::from("Test")), result[2]);
@ -220,7 +221,7 @@ mod tests {
#[test] #[test]
fn enum_decl() { fn enum_decl() {
let result = tokenize(&String::from("enum Test {}")); let result = tokenize(&String::from("enum Test {}")).unwrap();
assert_eq!(Token::Enum, result[0]); assert_eq!(Token::Enum, result[0]);
assert_eq!(Token::Identifier(String::from("Test")), result[1]); assert_eq!(Token::Identifier(String::from("Test")), result[1]);
assert_eq!(Token::CurlyOpen, result[2]); assert_eq!(Token::CurlyOpen, result[2]);