diff --git a/sketching/sketch.dm b/sketching/sketch.dm
new file mode 100644
index 0000000..efd5ba4
--- /dev/null
+++ b/sketching/sketch.dm
@@ -0,0 +1,174 @@
+ns nonavosa_rail
+
+pub mod train {
+
+    pub int Train {
+        name: String
+        primary_color: Color
+        destination: String
+
+        fn blow_horn(): IO
+        fn set_volume_by(factor: Number)
+    }
+
+    pub enum Color {
+        White, Blue, Red
+    }
+
+    pub fn int Printer {
+        (message: String): IO
+    }
+
+    pub fn create(props: Props & { printer: Printer }): Train = SimpleTrain { ...props } with { volume = 80.0 }
+
+    abs impl : Train {
+
+        #get #set
+        fld volume: Double
+
+    }
+
+    impl SimpleTrain : AbstractTrain {
+
+        printer: Printer
+
+        override fn set_volume(volume) {
+            if (volume < 0 || volume > 100) {
+                throw IllegalArgumentException {
+                    message: `Volume $volume is outside of bounds 0-100.`,
+                    argument: volume
+                }
+            }
+
+            this.volume = volume
+        }
+
+        impl fn blow_horn() = printer(`Train named $name is blowing its horn at volume $volume!`)
+            -> { s => s.append('Hello from SimpleTrain!') }
+
+        impl fn set_volume_by(factor) = set_volume(volume * factor)
+
+    }
+
+}
+
+fn main(args) {
+    let train = train::create {
+        name: 'Test Train',
+        primary_color: train::Color::Blue,
+        destination: 'Nonavosa Central',
+        printer: { msg => println(msg) }
+    }
+    train.set_volume_by(1.1)
+    train.blow_horn()
+
+    let anonymous_train = train::Train {
+        name: 'Anonymous Train',
+        primary_color: train::Color::Red,
+        destination: 'Nonavosa North',
+
+        impl fn blow_horn() = println('Choo Choo!')
+        impl fn set_volume_by = throw MethodNotSupported
+    }
+
+    try {
+        anonymous_train.set_volume_by(3.0)
+    } catch (e: MethodNotSupported) {
+        println e.message
+    }
+
+    anonymous_train.blow_horn() -> { println('After blow horn, assuming nothing went wrong before') }
+}
+
+// some other file
+
+fn log_printer(message) impl nonavosa_rail::train::Printer = msg =>
+    io::open_file('log.txt', io::Mode::Append) ->
+        { f => f.append(msg) } ->
+        io::close_file
+
+fn main() {
+    nonavosa_rail::main(log_printer)
+}
+
+// io.dm
+
+ns io
+
+decl extern fn println(s: String)
+
+#target[lua]
+decl extern fn _open_file(filename: String, mode: String): LuaTable
+
+#target[lua]
+pub fn open_file(filename: String, mode: Mode): IO {
+    try {
+        let lua_file_handle = lua {% fs.open(${filename}, ${mode.as_lua_string()}) %}
+        Success(LuaFile { lua_file_handle })
+    } catch (e: LuaError) {
+        Failure(e)
+    }
+}
+
+#target[lua]
+pub fn close_file(file): impl IO = file.close()
+
+pub int File : Stream
+
+#target[lua]
+impl LuaFile : File {
+
+    #get
+    fld lua_file_handle
+
+    impl fn append(data) {
+        try {
+            lua {% ${lua_file_handle}.write(${data}) %}
+            Success(self)
+        } catch (e: LuaError) {
+            Failure(e)
+        }
+    }
+
+    impl fn close() {
+        try {
+            lua {% ${lua_file_handle}.close() %}
+            Success
+        } catch (e: LuaError) {
+            Failure(e)
+        }
+    }
+
+}
+
+pub enum Mode {
+    Append, Write, Read
+}
+
+pub enum IO<S, E> : Chainable<IO<S, E>> {
+    Success(stream?: S) {
+        impl fn call() {
+            try {
+                do_call(stream)
+            } catch (e: E) {
+                Failure(e)
+            }
+        }
+    },
+    Failure(_) {
+        impl fn call() = self
+    };
+
+    fn do_call(stream: S): IO
+}
+
+pub int Stream {
+    fn append(data: String): IO
+    fn close(): IO
+}
+
+// chain.dm
+
+pub fn int Chainable<N> {
+    (): N
+}
diff --git a/src/lexer/mod.rs b/src/lexer/mod.rs
index 8f07ee3..00b2218 100644
--- a/src/lexer/mod.rs
+++ b/src/lexer/mod.rs
@@ -31,7 +31,7 @@ pub enum Token {
     Abstract
 }

-pub fn tokenize(input: &String) -> Vec<Token> {
+pub fn tokenize(input: &String) -> Result<Vec<Token>, &'static str> {
     let mut tokens: Vec<Token> = Vec::new();
     let mut peekable = input.chars().peekable();
     while let Some(c) = peekable.next() {
@@ -80,7 +80,7 @@ pub fn tokenize(input: &String) -> Vec<Token> {
                 match count {
                     1 => tokens.push(Token::Dot),
                     3 => tokens.push(Token::Ellipsis),
-                    _ => panic!("Too many dots.")
+                    _ => return Err("Unexpected number of tokens after '.'"),
                 }
             }
             _ => {
@@ -90,7 +90,7 @@ pub fn tokenize(input: &String) -> Vec<Token> {
             }
         }
     }
-    tokens
+    Ok(tokens)
 }

 fn match_identifier_or_keyword(start_char: char, peekable: &mut Peekable<Chars>) -> Option<Token> {
@@ -143,7 +143,7 @@ mod tests {

     #[test]
     fn simple_ns() {
-        let result = tokenize(&String::from("ns simple"));
+        let result = tokenize(&String::from("ns simple")).unwrap();
         assert_eq!(Token::Namespace, result[0]);
         assert_eq!(Token::Identifier(String::from("simple")), result[1]);
     }
@@ -153,14 +153,14 @@
         let mut src_file = File::open(Path::new("test-data/lexer/simple_ns.dm")).unwrap();
         let mut src = String::new();
         let _ = src_file.read_to_string(&mut src);
-        let result = tokenize(&src);
+        let result = tokenize(&src).unwrap();
         assert_eq!(Token::Namespace, result[0]);
         assert_eq!(Token::Identifier(String::from("simple")), result[1]);
     }

     #[test]
     fn pub_mod_simple() {
-        let result = tokenize(&String::from("pub mod simple"));
+        let result = tokenize(&String::from("pub mod simple")).unwrap();
         assert_eq!(Token::Public, result[0]);
         assert_eq!(Token::Module, result[1]);
         assert_eq!(Token::Identifier(String::from("simple")), result[2]);
@@ -168,21 +168,22 @@

     #[test]
     fn curly_open_and_close() {
-        let result = tokenize(&String::from("{ }"));
+        let result = tokenize(&String::from("{ }")).unwrap();
         assert_eq!(Token::CurlyOpen, result[0]);
         assert_eq!(Token::CurlyClose, result[1]);
     }

     #[test]
     fn simple_int() {
-        let result = tokenize(&String::from("int simple"));
+        let result = tokenize(&String::from("int simple")).unwrap();
         assert_eq!(Token::Interface, result[0]);
         assert_eq!(Token::Identifier(String::from("simple")), result[1]);
     }

     #[test]
     fn ns_pub_mod_simple() {
-        let result = tokenize(&String::from("ns simple_ns\npub mod simple { }"));
+        let result = tokenize(&String::from("ns simple_ns\npub mod simple { }"))
+            .unwrap();
         assert_eq!(Token::Namespace, result[0]);
         assert_eq!(Token::Identifier(String::from("simple_ns")), result[1]);
         assert_eq!(Token::Public, result[2]);
@@ -194,14 +195,14 @@

     #[test]
     fn curly_open_and_close_no_space() {
-        let result = tokenize(&String::from("{}"));
+        let result = tokenize(&String::from("{}")).unwrap();
         assert_eq!(Token::CurlyOpen, result[0]);
         assert_eq!(Token::CurlyClose, result[1]);
     }

     #[test]
     fn interface_function() {
-        let result = tokenize(&String::from("fn test(): Test"));
+        let result = tokenize(&String::from("fn test(): Test")).unwrap();
         assert_eq!(Token::Function, result[0]);
         assert_eq!(Token::Identifier(String::from("test")), result[1]);
         assert_eq!(Token::ParenOpen, result[2]);
@@ -212,7 +213,7 @@

     #[test]
     fn interface_prop() {
-        let result = tokenize(&String::from("test: Test"));
+        let result = tokenize(&String::from("test: Test")).unwrap();
         assert_eq!(Token::Identifier(String::from("test")), result[0]);
         assert_eq!(Token::Colon, result[1]);
         assert_eq!(Token::Identifier(String::from("Test")), result[2]);
@@ -220,7 +221,7 @@

     #[test]
     fn enum_decl() {
-        let result = tokenize(&String::from("enum Test {}"));
+        let result = tokenize(&String::from("enum Test {}")).unwrap();
         assert_eq!(Token::Enum, result[0]);
         assert_eq!(Token::Identifier(String::from("Test")), result[1]);
         assert_eq!(Token::CurlyOpen, result[2]);