diff --git a/Cargo.lock b/Cargo.lock index 39b92d83b..d34baa9ee 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1034,14 +1034,6 @@ dependencies = [ "str-buf", ] -[[package]] -name = "example-dylib" -version = "0.2.0" -dependencies = [ - "abi_stable", - "steel-core", -] - [[package]] name = "fallible-iterator" version = "0.2.0" diff --git a/benchmarks/bin-trees/bin-trees.scm b/benchmarks/bin-trees/bin-trees.scm index 2dc269877..ede6271ab 100644 --- a/benchmarks/bin-trees/bin-trees.scm +++ b/benchmarks/bin-trees/bin-trees.scm @@ -12,66 +12,53 @@ (struct node (left val right)) ;; Instead of (define-struct leaf (val)): -(define (leaf val) (node #f val #f)) -(define (leaf? l) (not (node-left l))) -(define (leaf-val l) node-val) +(define (leaf val) + (node #f val #f)) +(define (leaf? l) + (not (node-left l))) +(define (leaf-val l) + node-val) (define (make item d) (if (= d 0) (leaf item) - (%plain-let ((item2 (* item 2)) - (d2 (- d 1))) - (node (make (- item2 1) d2) - item - (make item2 d2))))) + (let ([item2 (* item 2)] [d2 (- d 1)]) (node (make (- item2 1) d2) item (make item2 d2))))) (define (check t) - (if (leaf? t) - 1 - (+ 1 (+ (check (node-left t)) - (check (node-right t)))))) + (if (leaf? t) 1 (+ 1 (+ (check (node-left t)) (check (node-right t)))))) (define (iterate n m d sum) - (if (equal? n m) - sum - (iterate (+ n 1) m d (+ sum (check (make n d)))))) + (if (equal? n m) sum (iterate (+ n 1) m d (+ sum (check (make n d)))))) +(define (max x y) + (if (> x y) x y)) (define (loop d end max-depth min-depth) - (if (>= d end) - void - (begin - (let ((iterations (arithmetic-shift 1 (+ (- max-depth d) min-depth)))) - (displayln iterations " trees of depth " d " check: " (iterate 0 iterations d 0)) - - - ) + (if (>= d end) + void + (begin + (let ([iterations (arithmetic-shift 1 (+ (- max-depth d) min-depth))]) + (displayln iterations " trees of depth " d " check: " (iterate 0 iterations d 0))) (loop (+ 2 d) end max-depth min-depth)))) - (define (main n) - (let* ((min-depth 4) - (max-depth (max (+ min-depth 2) n))) - (let ((stretch-depth (+ max-depth 1))) - (displayln "stretch tree of depth " stretch-depth " check: " (check (make 0 stretch-depth)))) - (let ((long-lived-tree (make 0 max-depth))) + (let* ([min-depth 4] [max-depth (max (+ min-depth 2) n)]) + (let ([stretch-depth (+ max-depth 1)]) + (displayln "stretch tree of depth " stretch-depth " check: " (check (make 0 stretch-depth)))) + (let ([long-lived-tree (make 0 max-depth)]) ; (begin - ; (define end ) + ; (define end ) + + (loop 4 (add1 max-depth) max-depth min-depth) - (loop 4 (add1 max-depth) - max-depth min-depth) - - ; ) + ; ) - - (displayln "long lived tree of depth " max-depth " check: " (check long-lived-tree)) - - ))) + (displayln "long lived tree of depth " max-depth " check: " (check long-lived-tree))))) (main 12) + ; (main 21) ; (main 21) - -; (command-line #:args (n) +; (command-line #:args (n) ; (main (string->number n))) diff --git a/cogs/module-tests/export.scm b/cogs/module-tests/export.scm new file mode 100644 index 000000000..225870eb9 --- /dev/null +++ b/cogs/module-tests/export.scm @@ -0,0 +1,38 @@ +(provide Applesauce + bananas + foo-bar-baz + new-identifier + one-more-identifier + another-identifier + Applesauce-foo + Applesauce-bar + Applesauce-baz + thing-should-not-escape + + my-fun-contracted-function) + +(define (bananas) + (error "Hello world")) + +(define (foo-bar-baz) + 10) + +(define/contract (my-fun-contracted-function x y) + (->/c int? int? int?) 
+  (+ x y))
+
+(define new-identifier 100)
+
+(define one-more-identifier 'foo-bar-baz)
+(define another-identifier 100)
+
+(define-syntax thing-should-not-escape
+  (syntax-rules ()
+    [(thing-should-not-escape x) (thing-should-not-escape2 x)]))
+
+(define-syntax thing-should-not-escape2
+  (syntax-rules ()
+    [(thing-should-not-escape x) x]))
+
+;; This should be provided!
+(struct Applesauce (foo bar baz))
diff --git a/cogs/module-tests/import.scm b/cogs/module-tests/import.scm
new file mode 100644
index 000000000..987382022
--- /dev/null
+++ b/cogs/module-tests/import.scm
@@ -0,0 +1,17 @@
+(require (prefix-in export. (only-in "export.scm" thing-should-not-escape Applesauce bananas)))
+
+export.Applesauce
+
+export.bananas
+
+(export.thing-should-not-escape 10)
+
+;; Dead code analysis would be nice as well
+;; If we can run constant evaluation over the result without actually
+;; taking the const evaluation branches, we can store the
+;; resulting removed spans and just render them in the LSP
+(cond
+  [(list? 10) (displayln "hello world!")]
+  [else
+   =>
+   (displayln "foo bar")])
diff --git a/crates/example-dylib/Cargo.toml b/crates/example-dylib/Cargo.toml
deleted file mode 100644
index 9d6418a59..000000000
--- a/crates/example-dylib/Cargo.toml
+++ /dev/null
@@ -1,14 +0,0 @@
-[package]
-name = "example-dylib"
-version = "0.2.0"
-edition = "2021"
-
-[lib]
-name = "example_dylib"
-crate-type = ["cdylib"]
-
-# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
-
-[dependencies]
-steel-core = { path = "../steel-core", version = "0.5.0", features = ["modules"] }
-abi_stable = "0.11.1"
diff --git a/crates/example-dylib/src/lib.rs b/crates/example-dylib/src/lib.rs
deleted file mode 100644
index 41b550818..000000000
--- a/crates/example-dylib/src/lib.rs
+++ /dev/null
@@ -1,41 +0,0 @@
-use steel::steel_vm::{builtin::BuiltInModule, register_fn::RegisterFn};
-use steel::SteelVal;
-
-fn hidden_function() -> usize {
-    10
-}
-
-// thread_local! {
-//     static MODULE: Rc<BuiltInModule> = create_module();
-// }
-
-// #[no_mangle]
-pub fn create_module() -> Box<BuiltInModule> {
-    let mut module = BuiltInModule::new("external-dylib".to_string());
-
-    module.register_value("outside-value", SteelVal::StringV("Hello world!".into()));
-    module.register_fn("hidden-function", hidden_function);
-
-    Box::new(module)
-    // module
-}
-
-#[no_mangle]
-pub fn generate_module() -> *mut BuiltInModule {
-    Box::into_raw(create_module())
-}
-
-#[no_mangle]
-pub fn build_module(module: &mut BuiltInModule) {
-    module.set_name("external-dylib".to_string());
-
-    module.register_value("outside-value", SteelVal::StringV("Hello world!".into()));
-    module.register_fn("hidden-function", hidden_function);
-}
-
-#[no_mangle]
-pub fn free_module(ptr: *mut BuiltInModule) {
-    unsafe {
-        let _ = Box::from_raw(ptr);
-    }
-}
diff --git a/crates/steel-core/benches/my_benchmark.rs b/crates/steel-core/benches/my_benchmark.rs
index 96a2511d4..0eec633b4 100644
--- a/crates/steel-core/benches/my_benchmark.rs
+++ b/crates/steel-core/benches/my_benchmark.rs
@@ -241,6 +241,9 @@ fn binary_trees(c: &mut Criterion) {
 (define (iterate n m d sum)
   (if (equal?
n m) sum (iterate (+ n 1) m d (+ sum (check (make n d)))))) +(define (max x y) + (if (> x y) x y)) + (define (loop d end max-depth min-depth) (if (>= d end) void diff --git a/crates/steel-core/src/compiler/code_gen.rs b/crates/steel-core/src/compiler/code_gen.rs index f52d4bff8..a80d16fb3 100644 --- a/crates/steel-core/src/compiler/code_gen.rs +++ b/crates/steel-core/src/compiler/code_gen.rs @@ -50,7 +50,6 @@ pub struct CodeGenerator<'a> { fn eval_atom(t: &SyntaxObject) -> Result { match &t.ty { TokenType::BooleanLiteral(b) => Ok((*b).into()), - // TokenType::Identifier(s) => env.borrow().lookup(&s), TokenType::NumberLiteral(n) => Ok(SteelVal::NumV(*n)), TokenType::StringLiteral(s) => Ok(SteelVal::StringV(s.into())), TokenType::CharacterLiteral(c) => Ok(SteelVal::CharV(*c)), @@ -642,6 +641,10 @@ impl<'a> VisitorMut for CodeGenerator<'a> { self.push( LabeledInstruction::builder(OpCode::PUSHCONST) .payload(idx) + // TODO: This is a little suspect, we're doing a bunch of stuff twice + // that we really don't need. In fact, we probably can get away with just... + // embedding the steel val directly here. + .list_contents(crate::parser::ast::ExprKind::Quote(Box::new(quote.clone()))) .constant(true), ); diff --git a/crates/steel-core/src/compiler/compiler.rs b/crates/steel-core/src/compiler/compiler.rs index 76d94b672..183e0bf90 100644 --- a/crates/steel-core/src/compiler/compiler.rs +++ b/crates/steel-core/src/compiler/compiler.rs @@ -9,6 +9,7 @@ use crate::{ reader::MultipleArityFunctions, shadow::RenameShadowedVariables, }, }, + core::labels::Expr, parser::{ ast::AstTools, expand_visitor::{expand_kernel, expand_kernel_in_env}, @@ -80,11 +81,11 @@ impl DebruijnIndicesInterner { Instruction { op_code: OpCode::BIND, contents: - Some(SyntaxObject { + Some(Expr::Atom(SyntaxObject { ty: TokenType::Identifier(s), span, .. - }), + })), .. }, Instruction { @@ -110,11 +111,11 @@ impl DebruijnIndicesInterner { Instruction { op_code: OpCode::BIND, contents: - Some(SyntaxObject { + Some(Expr::Atom(SyntaxObject { ty: TokenType::Identifier(s), span, .. - }), + })), .. }, .., @@ -176,10 +177,10 @@ impl DebruijnIndicesInterner { Instruction { op_code: OpCode::BIND, contents: - Some(SyntaxObject { + Some(Expr::Atom(SyntaxObject { ty: TokenType::Identifier(s), .. - }), + })), .. } => { // Keep track of where the defines actually are in the process @@ -188,21 +189,21 @@ impl DebruijnIndicesInterner { Instruction { op_code: OpCode::PUSH, contents: - Some(SyntaxObject { + Some(Expr::Atom(SyntaxObject { ty: TokenType::Identifier(s), span, .. - }), + })), .. } | Instruction { op_code: OpCode::SET, contents: - Some(SyntaxObject { + Some(Expr::Atom(SyntaxObject { ty: TokenType::Identifier(s), span, .. - }), + })), .. } => { if self.flat_defines.get(s).is_some() @@ -219,27 +220,26 @@ impl DebruijnIndicesInterner { // TODO commenting this for now if let Some(x) = instructions.get_mut(i) { x.payload_size = idx; - x.constant = false; } } Instruction { op_code: OpCode::CALLGLOBAL, contents: - Some(SyntaxObject { + Some(Expr::Atom(SyntaxObject { ty: TokenType::Identifier(s), span, .. - }), + })), .. } | Instruction { op_code: OpCode::CALLGLOBALTAIL, contents: - Some(SyntaxObject { + Some(Expr::Atom(SyntaxObject { ty: TokenType::Identifier(s), span, .. - }), + })), .. 
} => { if self.flat_defines.get(s).is_some() @@ -256,7 +256,6 @@ impl DebruijnIndicesInterner { // TODO commenting this for now if let Some(x) = instructions.get_mut(i) { x.payload_size = idx; - x.constant = false; } } _ => {} @@ -477,6 +476,25 @@ impl Compiler { self.lower_expressions_impl(parsed, constants, builtin_modules, path, sources) } + pub fn emit_expanded_ast_without_optimizations( + &mut self, + expr_str: &str, + constants: ImmutableHashMap, + path: Option, + sources: &mut Sources, + builtin_modules: ModuleContainer, + ) -> Result> { + let id = sources.add_source(expr_str.to_string(), path.clone()); + + // Could fail here + let parsed: std::result::Result, ParseError> = + Parser::new(expr_str, Some(id)).collect(); + + let parsed = parsed?; + + self.expand_ast(parsed, constants, builtin_modules, path, sources) + } + pub fn compile_module( &mut self, path: PathBuf, @@ -554,7 +572,7 @@ impl Compiler { Ok(results) } - fn lower_expressions_impl( + fn expand_ast( &mut self, exprs: Vec, constants: ImmutableHashMap, @@ -570,16 +588,146 @@ impl Compiler { .map(lower_entire_ast) .collect::, ParseError>>()?; - if log_enabled!(log::Level::Debug) { - debug!( - "Generating instructions for the expression: {:?}", - expanded_statements - .iter() - .map(|x| x.to_string()) - .collect::>() - ); + // if log_enabled!(log::Level::Debug) { + // debug!( + // "Generating instructions for the expression: {:?}", + // expanded_statements + // .iter() + // .map(|x| x.to_string()) + // .collect::>() + // ); + // } + + log::debug!(target: "expansion-phase", "Expanding macros -> phase 1"); + + if let Some(kernel) = self.kernel.as_mut() { + // Label anything at the top as well - top level + kernel.load_syntax_transformers(&mut expanded_statements, "top-level".to_string())?; } + expanded_statements = expanded_statements + .into_iter() + .map(|x| { + expand_kernel_in_env( + x, + self.kernel.as_mut(), + builtin_modules.clone(), + "top-level".to_string(), + ) + .and_then(|x| crate::parser::expand_visitor::expand(x, &self.macro_env)) + }) + .collect::>>()?; + + expanded_statements = expanded_statements + .into_iter() + .map(lower_entire_ast) + .collect::, ParseError>>()?; + + // expanded_statements.pretty_print(); + + log::debug!(target: "expansion-phase", "Beginning constant folding"); + + let mut expanded_statements = + self.apply_const_evaluation(constants.clone(), expanded_statements, false)?; + + RenameShadowedVariables::rename_shadowed_vars(&mut expanded_statements); + + let mut analysis = Analysis::from_exprs(&expanded_statements); + analysis.populate_captures(&expanded_statements); + + let mut semantic = SemanticAnalysis::from_analysis(&mut expanded_statements, analysis); + + // This is definitely broken still + semantic + .elide_single_argument_lambda_applications() + .replace_non_shadowed_globals_with_builtins( + &mut self.macro_env, + &mut self.module_manager, + &mut self.mangled_identifiers, + ) + // TODO: To get this to work, we have to check the macros to make sure those + // are safe to eliminate. 
In interactive mode, we'll + // be unable to optimize those away + .remove_unused_globals_with_prefix("mangler", &self.macro_env, &self.module_manager); + + // Don't do lambda lifting here + // .lift_pure_local_functions() + // .lift_all_local_functions(); + + // debug!("About to expand defines"); + + log::debug!(target: "expansion-phase", "Flattening begins, converting internal defines to let expressions"); + + let mut analysis = semantic.into_analysis(); + + let mut expanded_statements = flatten_begins_and_expand_defines(expanded_statements); + + // After define expansion, we'll want this + RenameShadowedVariables::rename_shadowed_vars(&mut expanded_statements); + + analysis.fresh_from_exprs(&expanded_statements); + analysis.populate_captures(&expanded_statements); + + let mut semantic = SemanticAnalysis::from_analysis(&mut expanded_statements, analysis); + semantic.refresh_variables(); + semantic.flatten_anonymous_functions(); + semantic.refresh_variables(); + + // Replace mutation with boxes + semantic.populate_captures(); + semantic.populate_captures(); + + semantic.replace_mutable_captured_variables_with_boxes(); + + log::debug!(target: "expansion-phase", "Expanding multiple arity functions"); + + let mut analysis = semantic.into_analysis(); + + // Rename them again + RenameShadowedVariables::rename_shadowed_vars(&mut expanded_statements); + + let mut expanded_statements = + MultipleArityFunctions::expand_multiple_arity_functions(expanded_statements); + + log::info!(target: "expansion-phase", "Aggressive constant evaluation with memoization"); + + // Begin lowering anonymous function calls to lets + + analysis.fresh_from_exprs(&expanded_statements); + analysis.populate_captures(&expanded_statements); + let mut semantic = SemanticAnalysis::from_analysis(&mut expanded_statements, analysis); + + semantic.replace_anonymous_function_calls_with_plain_lets(); + + Ok(expanded_statements) + } + + fn lower_expressions_impl( + &mut self, + exprs: Vec, + constants: ImmutableHashMap, + builtin_modules: ModuleContainer, + path: Option, + sources: &mut Sources, + ) -> Result> { + let mut expanded_statements = + self.expand_expressions(exprs, path, sources, builtin_modules.clone())?; + + expanded_statements = expanded_statements + .into_iter() + .map(lower_entire_ast) + .collect::, ParseError>>()?; + + // if log_enabled!(log::Level::Debug) { + // debug!( + // "Generating instructions for the expression: {:?}", + // expanded_statements + // .iter() + // .map(|x| x.to_string()) + // .collect::>() + // ); + // } + log::debug!(target: "expansion-phase", "Expanding macros -> phase 1"); if let Some(kernel) = self.kernel.as_mut() { @@ -633,25 +781,24 @@ impl Compiler { .remove_unused_globals_with_prefix("mangler", &self.macro_env, &self.module_manager) .lift_pure_local_functions() .lift_all_local_functions(); - // .remove_unused_globals_with_prefix("manglersteel/"); // debug!("About to expand defines"); log::debug!(target: "expansion-phase", "Flattening begins, converting internal defines to let expressions"); + let mut analysis = semantic.into_analysis(); + let mut expanded_statements = flatten_begins_and_expand_defines(expanded_statements); // After define expansion, we'll want this RenameShadowedVariables::rename_shadowed_vars(&mut expanded_statements); - let mut analysis = Analysis::from_exprs(&expanded_statements); + analysis.fresh_from_exprs(&expanded_statements); analysis.populate_captures(&expanded_statements); let mut semantic = SemanticAnalysis::from_analysis(&mut expanded_statements, analysis); 
semantic.refresh_variables(); - semantic.flatten_anonymous_functions(); - semantic.refresh_variables(); // Replace mutation with boxes @@ -660,18 +807,20 @@ impl Compiler { semantic.replace_mutable_captured_variables_with_boxes(); - if log_enabled!(log::Level::Debug) { - debug!( - "Successfully expanded defines: {:?}", - expanded_statements - .iter() - .map(|x| x.to_string()) - .collect::>() - ); - } + // if log_enabled!(log::Level::Debug) { + // debug!( + // "Successfully expanded defines: {:?}", + // expanded_statements + // .iter() + // .map(|x| x.to_string()) + // .collect::>() + // ); + // } log::debug!(target: "expansion-phase", "Expanding multiple arity functions"); + let mut analysis = semantic.into_analysis(); + // Rename them again RenameShadowedVariables::rename_shadowed_vars(&mut expanded_statements); @@ -683,17 +832,20 @@ impl Compiler { // Begin lowering anonymous function calls to lets - let mut analysis = Analysis::from_exprs(&expanded_statements); + // let mut analysis = Analysis::from_exprs(&expanded_statements); + // let mut analysis = semantic.into_analysis(); + analysis.fresh_from_exprs(&expanded_statements); analysis.populate_captures(&expanded_statements); - let mut semantic = SemanticAnalysis::from_analysis(&mut expanded_statements, analysis); - semantic.populate_captures(); + // semantic.populate_captures(); semantic.replace_anonymous_function_calls_with_plain_lets(); - // Done lowering anonymous function calls to let + Ok(expanded_statements) - self.apply_const_evaluation(constants, expanded_statements, true) + // Done lowering anonymous function calls to let + // TODO: Re-enable this, but not in the repl. This repl causes... issues with the implementation + // self.apply_const_evaluation(constants, expanded_statements, true) } // TODO @@ -727,6 +879,9 @@ impl Compiler { // Make sure to apply the peephole optimizations raw_program.apply_optimizations(); + // Lets see everything that gets run! + // raw_program.debug_print(); + Ok(raw_program) } diff --git a/crates/steel-core/src/compiler/constants.rs b/crates/steel-core/src/compiler/constants.rs index c0f492b84..388975363 100644 --- a/crates/steel-core/src/compiler/constants.rs +++ b/crates/steel-core/src/compiler/constants.rs @@ -6,6 +6,7 @@ use crate::parser::{ parser::{ParseError, Parser}, }; +use std::collections::HashMap; use std::{cell::RefCell, rc::Rc}; // TODO add the serializing and deserializing for constants @@ -14,7 +15,10 @@ use serde::{Deserialize, Serialize}; // Shared constant map - for repeated in memory execution of a program, this is going to share the same // underlying representation. 
#[derive(Debug, PartialEq)] -pub struct ConstantMap(Rc>>); +pub struct ConstantMap { + map: Rc>>, + values: Rc>>, +} #[derive(Serialize, Deserialize)] pub struct SerializableConstantMap(Vec); @@ -31,13 +35,19 @@ impl Default for ConstantMap { impl Clone for ConstantMap { fn clone(&self) -> Self { - Self(Rc::clone(&self.0)) + Self { + values: Rc::clone(&self.values), + map: Rc::clone(&self.map), + } } } impl ConstantMap { pub fn new() -> ConstantMap { - ConstantMap(Rc::new(RefCell::new(Vec::new()))) + ConstantMap { + values: Rc::new(RefCell::new(Vec::new())), + map: Rc::new(RefCell::new(HashMap::new())), + } } pub(crate) fn into_serializable_map(self) -> SerializableConstantMap { @@ -45,7 +55,7 @@ impl ConstantMap { } pub fn to_serializable_vec(&self) -> Vec { - self.0 + self.values .borrow() .iter() .cloned() @@ -56,18 +66,29 @@ impl ConstantMap { // There might be a better way of doing this - but provide this as an option // in the event we want a deep clone of the constant map - pub fn deep_clone(&self) -> ConstantMap { - ConstantMap(Rc::new(RefCell::new( - self.0.borrow().iter().cloned().collect(), - ))) - } + // pub fn deep_clone(&self) -> ConstantMap { + // ConstantMap(Rc::new(RefCell::new( + // self.0.borrow().iter().cloned().collect(), + // ))) + // } pub fn from_vec(vec: Vec) -> ConstantMap { - ConstantMap(Rc::new(RefCell::new(vec))) + // ConstantMap(Rc::new(RefCell::new(vec))) + + ConstantMap { + map: Rc::new(RefCell::new( + vec.clone() + .into_iter() + .enumerate() + .map(|x| (x.1, x.0)) + .collect(), + )), + values: Rc::new(RefCell::new(vec)), + } } fn to_constant_expr_map(&self) -> Vec { - self.0 + self.values .borrow() .iter() .map(|x| match x { @@ -113,7 +134,7 @@ impl ConstantMap { // Ok(SteelVal::try_from(parsed[0].clone()).unwrap()) }) .collect::>>() - .map(|x| ConstantMap(Rc::new(RefCell::new(x)))) + .map(Self::from_vec) } // pub fn from_bytes(encoded: &[u8]) -> ConstantMap { @@ -124,18 +145,22 @@ impl ConstantMap { impl ConstantMap { pub fn add(&mut self, val: SteelVal) -> usize { let idx = self.len(); - self.0.borrow_mut().push(val); + self.values.borrow_mut().push(val.clone()); + + // TODO: Consider just storing the hash code, not the actual value. 
+ self.map.borrow_mut().insert(val, idx); + idx } // Fallible #[inline(always)] pub fn get(&self, idx: usize) -> SteelVal { - self.0.borrow()[idx].clone() + self.values.borrow()[idx].clone() } pub fn try_get(&self, idx: usize) -> Option { - self.0.borrow().get(idx).cloned() + self.values.borrow().get(idx).cloned() } // Replace with existing constants if they already exist @@ -163,7 +188,7 @@ impl ConstantMap { }; } - let idx = { self.0.borrow_mut().iter().position(|x| x == &val) }; + let idx = self.map.borrow_mut().get(&val).copied(); if let Some(idx) = idx { idx @@ -173,20 +198,20 @@ impl ConstantMap { } pub fn len(&self) -> usize { - self.0.borrow().len() + self.values.borrow().len() } pub fn is_empty(&self) -> bool { - self.0.borrow().is_empty() + self.values.borrow().is_empty() } pub fn roll_back(&mut self, idx: usize) { - self.0.borrow_mut().truncate(idx); + self.values.borrow_mut().truncate(idx); } #[cfg(test)] pub fn clear(&mut self) { - self.0.borrow_mut().clear() + self.values.borrow_mut().clear() } } diff --git a/crates/steel-core/src/compiler/map.rs b/crates/steel-core/src/compiler/map.rs index 0c46bb468..1234f2ece 100644 --- a/crates/steel-core/src/compiler/map.rs +++ b/crates/steel-core/src/compiler/map.rs @@ -6,7 +6,6 @@ use std::collections::HashMap; #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] pub struct SymbolMap { values: Vec, - // TODO don't do this - don't expose this API map: HashMap, } diff --git a/crates/steel-core/src/compiler/modules.rs b/crates/steel-core/src/compiler/modules.rs index 10d6f3634..0912ea123 100644 --- a/crates/steel-core/src/compiler/modules.rs +++ b/crates/steel-core/src/compiler/modules.rs @@ -9,7 +9,10 @@ use crate::{ }, parser::{ ast::{AstTools, Atom, Begin, Define, ExprKind, List, Quote}, - expand_visitor::{expand_kernel, expand_kernel_in_env}, + expand_visitor::{ + expand_kernel, expand_kernel_in_env, expand_kernel_in_env_with_allowed, + expand_kernel_in_env_with_change, + }, interner::InternedString, kernel::Kernel, parser::{ @@ -18,7 +21,7 @@ use crate::{ }, tokens::TokenType, }, - steel_vm::{engine::ModuleContainer, transducers::interleave}, + steel_vm::{builtin::BuiltInModule, engine::ModuleContainer, transducers::interleave}, }; use crate::{parser::expand_visitor::Expander, rvals::Result}; @@ -84,7 +87,8 @@ declare_builtins!( "#%private/steel/contract" => "../scheme/modules/contracts.scm", "#%private/steel/print" => "../scheme/print.scm", "#%private/steel/control" => "../scheme/modules/parameters.scm", - "#%private/steel/reader" => "../scheme/modules/reader.scm" + "#%private/steel/reader" => "../scheme/modules/reader.scm", + "#%private/steel/stdlib" => "../scheme/stdlib.scm" ); create_prelude!( @@ -239,9 +243,7 @@ impl ModuleManager { let mut explicit_requires = HashMap::new(); - for require_object in &module_builder.require_objects - // .chain(module_builder.built_ins.iter()) - { + for require_object in &module_builder.require_objects { let path = require_object.path.get_path(); explicit_requires.clear(); @@ -266,7 +268,7 @@ impl ModuleManager { let module = if let Some(module) = module_builder.compiled_modules.get(path.as_ref()) { module } else { - log::debug!(target: "modules", "No provides found for module, skipping: {:?}", path); + // log::debug!(target: "modules", "No provides found for module, skipping: {:?}", path); continue; }; @@ -306,6 +308,15 @@ impl ModuleManager { continue; } + // TODO: This should surface an error - cannot use contract + // out on a macro + if module + .macro_map + 
.contains_key(name.atom_identifier().unwrap()) + { + continue; + } + // TODO: THe contract has to get mangled with the prefix as well? let contract = l.args.get(2).unwrap(); @@ -369,6 +380,13 @@ impl ModuleManager { continue; } + if module + .macro_map + .contains_key(name.atom_identifier().unwrap()) + { + continue; + } + let hash_get = expr_list![ ExprKind::ident("%proto-hash-get%"), ExprKind::atom( @@ -431,6 +449,13 @@ impl ModuleManager { continue; } + if module + .macro_map + .contains_key(provide.atom_identifier().unwrap()) + { + continue; + } + let hash_get = expr_list![ ExprKind::ident("%proto-hash-get%"), ExprKind::atom("__module-".to_string() + &other_module_prefix), @@ -484,51 +509,85 @@ impl ModuleManager { // TODO: Move this to the lower level as well // It seems we're only doing this expansion at the top level, but we _should_ do this at the lower level as well - for require_for_syntax in module_builder - .require_objects - .iter() - .filter(|x| x.for_syntax) - .map(|x| x.path.get_path()) + for require_object in module_builder.require_objects.iter() + // .filter(|x| x.for_syntax) + // .map(|x| x.path.get_path()) { - let (module, mut in_scope_macros) = Self::find_in_scope_macros( + let require_for_syntax = require_object.path.get_path(); + + let (module, mut in_scope_macros, mut name_mangler) = Self::find_in_scope_macros( &self.compiled_modules, require_for_syntax.as_ref(), + &require_object, &mut mangled_asts, ); - // dbg!(&in_scope_macros); - - // for (key, value) in &mut in_scope_macros { - // for line in value.exprs_mut() { - // println!("{}", line); - // } - // } - - // ast = ast.into_iter().map(|x| ) - - // ast.pretty_print(); + let kernel_macros_in_scope: HashSet<_> = + module.provides_for_syntax.iter().cloned().collect(); ast = ast .into_iter() .map(|x| { + // @matt 12/8/2023 + // The easiest thing to do here, is to go to the other module, and find + // what defmacros have been exposed on the require for syntax. Once those + // have been found, we run a pass with kernel expansion, limiting the + // expander to only use the macros that we've exposed. After that, + // we run the expansion again, using the full suite of defmacro capabilities. + // + // The question that remains - how to define the neat phases of what kinds + // of macros can expand into what? Can defmacro -> syntax-rules -> defmacro? + // This could eventually prove to be cumbersome, but it is still early + // for defmacro. Plus, I need to create a syntax-case or syntax-parse + // frontend before the defmacro style macros become too pervasive. + // + // TODO: Replicate this behavior over to builtin modules + // First expand the in scope macros // These are macros let mut expander = Expander::new(&in_scope_macros); - let first_round_expanded = expander.expand(x)?; - - if expander.changed { + let mut first_round_expanded = expander.expand(x)?; + let mut changed = false; + + // (first_round_expanded, changed) = expand_kernel_in_env_with_allowed( + // first_round_expanded, + // kernel.as_mut(), + // // We don't need to expand those here + // ModuleContainer::default(), + // module.name.to_str().unwrap().to_string(), + // &kernel_macros_in_scope, + // )?; + + // If the kernel expander expanded into something - go ahead + // and expand all of the macros in this + // if changed || expander.changed { + // Expand here? 
+ // first_round_expanded = expand(first_round_expanded, &module.macro_map)?; + + // Probably don't need this + // (first_round_expanded, changed) = expand_kernel_in_env_with_change( + // first_round_expanded, + // kernel.as_mut(), + // ModuleContainer::default(), + // module.name.to_str().unwrap().to_string(), + // )?; + + // This is pretty suspect, and needs to be revisited - only the output of the + // macro expansion and not the whole thing needs to be mangled most likely. + // Otherwise, we'll run into weird stuff? + // if changed { + // name_mangler.visit(&mut first_round_expanded); + // } + // } + + if expander.changed || changed { expand(first_round_expanded, &module.macro_map) } else { Ok(first_round_expanded) } - - // expand(x, &module.macro_map) }) .collect::>()?; - // TODO: @Matt 10/16/12 - // This won't work if the macros expand to other private macros. - // Tracking issue here: global_macro_map.extend(in_scope_macros); } @@ -543,17 +602,24 @@ impl ModuleManager { // @Matt 7/4/23 // TODO: With mangling, this could cause problems. We'll want to un-mangle quotes AFTER the macro has been expanded, // in order to preserve the existing behavior. - module_statements + let result = module_statements .into_iter() .map(|x| expand(x, global_macro_map)) - .collect::>() + .collect::>(); + + result } fn find_in_scope_macros<'a>( compiled_modules: &'a HashMap, require_for_syntax: &'a PathBuf, + require_object: &'a RequireObject, mangled_asts: &'a mut Vec, - ) -> (&'a CompiledModule, HashMap) { + ) -> ( + &'a CompiledModule, + HashMap, + NameMangler, + ) { let module = compiled_modules .get(require_for_syntax) .expect(&format!("Module missing!: {:?}", require_for_syntax)); @@ -582,9 +648,11 @@ impl ModuleManager { // do this for each of the expressions in the file in this loop // TODO -> try not cloning this // TODO -> do this in the module expansion as well - let in_scope_macros = module + let mut in_scope_macros = module .provides_for_syntax .iter() + // Chain with just the normal provides! 
+ // .chain(module.provides) .filter_map(|x| module.macro_map.get(x).map(|m| (*x, m.clone()))) // TODO -> fix this unwrap .map(|mut x| { for expr in x.1.exprs_mut() { @@ -594,12 +662,87 @@ impl ModuleManager { x }) .collect::>(); + + // If the require_object specifically imports things, we should reference it + + if !require_object.idents_to_import.is_empty() { + for maybe in &require_object.idents_to_import { + match maybe { + MaybeRenamed::Normal(n) => { + if let Some(ident) = n.atom_identifier() { + if let Some(mut m) = module.macro_map.get(ident).cloned() { + for expr in m.exprs_mut() { + name_mangler.visit(expr); + } + + if let Some(prefix) = &require_object.prefix { + in_scope_macros + .insert((prefix.to_string() + ident.resolve()).into(), m); + } else { + in_scope_macros.insert(*ident, m); + } + } + } + } + MaybeRenamed::Renamed(from, to) => { + if let Some(ident) = from.atom_identifier() { + if let Some(mut m) = module.macro_map.get(ident).cloned() { + for expr in m.exprs_mut() { + name_mangler.visit(expr); + } + // TODO: Remove this unwrap + // in_scope_macros.insert(*to.atom_identifier().unwrap(), m); + + if let Some(prefix) = &require_object.prefix { + in_scope_macros.insert( + (prefix.to_string() + + to.atom_identifier().unwrap().resolve()) + .into(), + m, + ); + } else { + in_scope_macros.insert(*to.atom_identifier().unwrap(), m); + } + } + } + } + } + } + } else { + // Pull in all of the macros that the module exposes + + for provide_expr in &module.provides { + if let Some(provide_expr) = provide_expr.list() { + for ident in provide_expr.args.split_first().unwrap().1 { + // println!("Looking for {}", ident); + + if let Some(ident) = ident.atom_identifier() { + if let Some(mut m) = module.macro_map.get(ident).cloned() { + // println!("Pulling in macro: {}", ident); + + for expr in m.exprs_mut() { + name_mangler.visit(expr); + } + + if let Some(prefix) = &require_object.prefix { + in_scope_macros + .insert((prefix.to_string() + ident.resolve()).into(), m); + } else { + in_scope_macros.insert(*ident, m); + } + } + } + } + } + } + } + // Check what macros are in scope here - debug!( - "In scope macros: {:#?}", - in_scope_macros.keys().collect::>() - ); - (module, in_scope_macros) + // println!( + // "In scope macros: {:#?}", + // in_scope_macros.keys().collect::>() + // ); + (module, in_scope_macros, name_mangler) } #[cfg(not(feature = "modules"))] @@ -616,6 +759,10 @@ impl ModuleManager { } } +// Pre-compile module to bytecode? Is it even possible? +// Dynamically linking the module would then make it relatively +// easy to just load everything up at the start. +// Compiled module _should_ be possible now. 
Just create a target #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] pub struct CompiledModule { name: PathBuf, @@ -648,6 +795,10 @@ impl CompiledModule { } } + pub fn get_ast(&self) -> &[ExprKind] { + &self.ast + } + pub fn get_provides(&self) -> &[ExprKind] { &self.provides } @@ -668,6 +819,7 @@ impl CompiledModule { let mut globals = collect_globals(&self.ast); let mut exprs = self.ast.clone(); + let mut provide_definitions = Vec::new(); let prefix = "mangler".to_string() + self.name.to_str().unwrap() + MANGLER_SEPARATOR; @@ -711,6 +863,10 @@ impl CompiledModule { match provide { ExprKind::List(l) => { if let Some(qualifier) = l.first_ident() { + if module.macro_map.contains_key(qualifier) { + continue; + } + match *qualifier { x if x == *CONTRACT_OUT => { // Directly expand into define/contract, but with the value just being the hash get below @@ -796,6 +952,13 @@ impl CompiledModule { continue; } + if module + .macro_map + .contains_key(provide.atom_identifier().unwrap()) + { + continue; + } + // Mangle with a prefix if necessary let mut provide = provide.clone(); @@ -941,6 +1104,9 @@ impl CompiledModule { } } + // Drop all of the macro references here + provides.retain(|x| !self.macro_map.contains_key(x.0.atom_identifier().unwrap())); + // We want one without the mangled version, for the actual provides let un_mangled = provides.clone(); @@ -1063,9 +1229,9 @@ impl CompiledModule { // TODO clean this up let res = ExprKind::List(List::new(body)); - if log_enabled!(target: "requires", log::Level::Debug) { - debug!(target: "requires", "Module ast node: {}", res.to_string()); - } + // if log_enabled!(target: "requires", log::Level::Debug) { + // debug!(target: "requires", "Module ast node: {}", res.to_string()); + // } res } @@ -1130,12 +1296,8 @@ struct ModuleBuilder<'a> { source_ast: Vec, macro_map: HashMap, // TODO: Change the requires / requires_for_syntax to just be a require enum? 
- - // requires: Vec, - // requires_for_syntax: Vec, require_objects: Vec, - // built_ins: Vec, provides: Vec, provides_for_syntax: Vec, compiled_modules: &'a mut HashMap, @@ -1193,7 +1355,7 @@ impl<'a> ModuleBuilder<'a> { } fn compile(&mut self) -> Result> { - debug!(target: "requires", "Visiting: {:?}", self.name); + // debug!(target: "requires", "Visiting: {:?}", self.name); // @Matt - 10/3/23 // This has a relatively fatal flaw at the moment: @@ -1227,9 +1389,9 @@ impl<'a> ModuleBuilder<'a> { self.collect_provides()?; if log_enabled!(log::Level::Info) { - debug!(target: "requires", "Requires: {:#?}", self.require_objects); - debug!(target: "requires", "Provides: {:#?}", self.provides); - debug!(target: "requires", "Provides for-syntax: {:?}", self.provides_for_syntax); + // debug!(target: "requires", "Requires: {:#?}", self.require_objects); + // debug!(target: "requires", "Provides: {:#?}", self.provides); + // debug!(target: "requires", "Provides for-syntax: {:?}", self.provides_for_syntax); } if self.visited.contains(&self.name) { @@ -1272,7 +1434,7 @@ impl<'a> ModuleBuilder<'a> { // Otherwise go ahead and compile // If we already have compiled this module, get it from the cache if let Some(_m) = self.compiled_modules.get(module.as_ref()) { - debug!("Getting {:?} from the module cache", module); + // debug!("Getting {:?} from the module cache", module); // println!("Already found in the cache: {:?}", module); // new_exprs.push(m.to_module_ast_node()); // No need to do anything @@ -1310,27 +1472,13 @@ impl<'a> ModuleBuilder<'a> { // This will eventually put the module in the cache let mut module_exprs = new_module.compile()?; - // debug!("Inside {:?} - append {:?}", self.name, module); - if log_enabled!(log::Level::Debug) { - debug!( - "appending with {:?}", - module_exprs.iter().map(|x| x.to_string()).join(" SEP ") - ); - } - new_exprs.append(&mut module_exprs); - // TODO evaluate this - - // let mut ast = std::mem::replace(&mut new_module.source_ast, Vec::new()); - // ast.append(&mut module_exprs); - // new_module.source_ast = ast; - // Probably want to evaluate a module even if it has no provides? 
if !new_module.provides.is_empty() { new_exprs.push(new_module.compile_module()?); } else { - log::debug!(target: "requires", "Found no provides, skipping compilation of module: {:?}", new_module.name); + // log::debug!(target: "requires", "Found no provides, skipping compilation of module: {:?}", new_module.name); } } @@ -1359,7 +1507,7 @@ impl<'a> ModuleBuilder<'a> { if !should_recompile { // If we already have compiled this module, get it from the cache if let Some(_m) = self.compiled_modules.get(module.as_ref()) { - debug!("Getting {:?} from the module cache", module); + // debug!("Getting {:?} from the module cache", module); // println!("Already found in the cache: {:?}", module); // new_exprs.push(m.to_module_ast_node()); // No need to do anything @@ -1384,13 +1532,13 @@ impl<'a> ModuleBuilder<'a> { let mut module_exprs = new_module.compile()?; // debug!("Inside {:?} - append {:?}", self.name, module); - if log_enabled!(log::Level::Debug) { - debug!( - target: "modules", - "appending with {:?}", - module_exprs.iter().map(|x| x.to_string()).join(" SEP ") - ); - } + // if log_enabled!(log::Level::Debug) { + // debug!( + // target: "modules", + // "appending with {:?}", + // module_exprs.iter().map(|x| x.to_string()).join(" SEP ") + // ); + // } new_exprs.append(&mut module_exprs); @@ -1411,9 +1559,9 @@ impl<'a> ModuleBuilder<'a> { // else if !new_module.compiled_modules.contains_key(&new_module.name) { new_exprs.push(new_module.compile_module()?); } else { - log::debug!(target: "requires", "Found no provides, skipping compilation of module: {:?}", new_module.name); - log::debug!(target: "requires", "Module already in the cache: {}", new_module.compiled_modules.contains_key(&new_module.name)); - log::debug!(target: "requires", "Compiled modules: {:?}", new_module.compiled_modules.keys().collect::>()); + // log::debug!(target: "requires", "Found no provides, skipping compilation of module: {:?}", new_module.name); + // log::debug!(target: "requires", "Module already in the cache: {}", new_module.compiled_modules.contains_key(&new_module.name)); + // log::debug!(target: "requires", "Compiled modules: {:?}", new_module.compiled_modules.keys().collect::>()); } // else { @@ -1434,11 +1582,11 @@ impl<'a> ModuleBuilder<'a> { // Clone the requires... 
I suppose let requires = self.require_objects.clone(); - info!( - target: "requires", - "Into compiled module: provides for syntax: {:?}", - self.provides_for_syntax - ); + // info!( + // target: "requires", + // "Into compiled module: provides for syntax: {:?}", + // self.provides_for_syntax + // ); // Attempt extracting the syntax transformers from this module if let Some(kernel) = self.kernel.as_mut() { @@ -1486,27 +1634,57 @@ impl<'a> ModuleBuilder<'a> { let mut mangled_asts = Vec::new(); // Look for the modules in the requires for syntax - for require_for_syntax in self - .require_objects - .iter() - .filter(|x| x.for_syntax) - .map(|x| x.path.get_path()) + for require_object in self.require_objects.iter() + // .filter(|x| x.for_syntax) { - let (module, in_scope_macros) = ModuleManager::find_in_scope_macros( + let require_for_syntax = require_object.path.get_path(); + + let (module, in_scope_macros, mut name_mangler) = ModuleManager::find_in_scope_macros( self.compiled_modules, require_for_syntax.as_ref(), + &require_object, &mut mangled_asts, ); + let kernel_macros_in_scope: HashSet<_> = + module.provides_for_syntax.iter().cloned().collect(); + ast = ast .into_iter() .map(|x| { // First expand the in scope macros // These are macros let mut expander = Expander::new(&in_scope_macros); - let first_round_expanded = expander.expand(x)?; - - if expander.changed { + let mut first_round_expanded = expander.expand(x)?; + let mut changed = false; + + // (first_round_expanded, changed) = expand_kernel_in_env_with_allowed( + // first_round_expanded, + // self.kernel.as_mut(), + // // We don't need to expand those here + // ModuleContainer::default(), + // module.name.to_str().unwrap().to_string(), + // &kernel_macros_in_scope, + // )?; + + // If the kernel expander expanded into something - go ahead + // and expand all of the macros in this + // if changed || expander.changed { + // Expand here? + // first_round_expanded = expand(first_round_expanded, &module.macro_map)?; + + // Probably don't need this + // (first_round_expanded, changed) = expand_kernel_in_env_with_change( + // first_round_expanded, + // self.kernel.as_mut(), + // ModuleContainer::default(), + // module.name.to_str().unwrap().to_string(), + // )?; + + // name_mangler.visit(&mut first_round_expanded); + // } + + if expander.changed || changed { expand(first_round_expanded, &module.macro_map) } else { Ok(first_round_expanded) @@ -1588,9 +1766,18 @@ impl<'a> ModuleBuilder<'a> { module.set_emitted(true); + // println!( + // "-------------- Emitting module: {:?} ----------------------", + // self.name + // ); + let mut result = module.to_top_level_module(self.compiled_modules, self.global_macro_map)?; + // println!("{}", result.to_pretty(60)); + + // println!("------------------ Finish ----------------------------------"); + // let mut analysis = Analysis::from_exprs(&[result]); // let mut semantic = SemanticAnalysis::from_analysis(&mut result, analysis); @@ -1599,7 +1786,7 @@ impl<'a> ModuleBuilder<'a> { // semantic // .remove_unused_globals_with_prefix("mangler"); - log::debug!(target: "requires", "Adding compiled module: {:?}", self.name); + // log::debug!(target: "requires", "Adding compiled module: {:?}", self.name); self.compiled_modules.insert(self.name.clone(), module); @@ -1744,6 +1931,7 @@ impl<'a> ModuleBuilder<'a> { syn: SyntaxObject { ty: TokenType::StringLiteral(s), + span, .. 
}, }) => { @@ -1782,7 +1970,7 @@ impl<'a> ModuleBuilder<'a> { log::info!("Searching STEEL_HOME for {:?}", current); } else { - stop!(Generic => format!("Module not found: {:?}", self.name)) + stop!(Generic => format!("Module not found: {:?} with STEEL_HOME: {:?}", current, home); *span) } } @@ -1894,7 +2082,7 @@ impl<'a> ModuleBuilder<'a> { log::info!("Searching STEEL_HOME for {:?}", current); } else { - stop!(Generic => format!("Module not found: {:?}", self.name)) + stop!(Generic => format!("Module not found: {:?}", current); r.location.span) } } @@ -2077,9 +2265,17 @@ impl<'a> ModuleBuilder<'a> { .insert(self.name.clone(), file.metadata()?.modified()?); // TODO: DEFAULT MODULE LOADER PREFIX - // let mut exprs = String::new(); + let mut exprs = String::new(); - let mut exprs = PRELUDE_STRING.to_string(); + // TODO: Don't do this - get the source from the cache? + // let mut exprs = PRELUDE_STRING.to_string(); + + let mut expressions = Parser::new(&PRELUDE_STRING, None) + .without_lowering() + .map(|x| x.and_then(lower_macro_and_require_definitions)) + .collect::, ParseError>>()?; + + // let expressions = Parser::new_from_source(, , ) // Add the modules here: @@ -2096,12 +2292,14 @@ impl<'a> ModuleBuilder<'a> { let exprs = guard.get(id).unwrap(); - let parsed = Parser::new_from_source(&exprs, self.name.clone(), Some(id)) + let mut parsed = Parser::new_from_source(&exprs, self.name.clone(), Some(id)) .without_lowering() .map(|x| x.and_then(lower_macro_and_require_definitions)) .collect::, ParseError>>()?; - self.source_ast = parsed; + expressions.append(&mut parsed); + + self.source_ast = expressions; } Ok(self) diff --git a/crates/steel-core/src/compiler/passes/analysis.rs b/crates/steel-core/src/compiler/passes/analysis.rs index 66c709882..94d9dd56a 100644 --- a/crates/steel-core/src/compiler/passes/analysis.rs +++ b/crates/steel-core/src/compiler/passes/analysis.rs @@ -5,9 +5,13 @@ use std::{ use im_rc::HashMap as ImmutableHashMap; use quickscope::ScopeMap; +use steel_parser::{ast::PROTO_HASH_GET, parser::SourceId}; use crate::{ - compiler::modules::{ModuleManager, MANGLER_SEPARATOR}, + compiler::{ + map::SymbolMap, + modules::{ModuleManager, MANGLER_SEPARATOR}, + }, parser::{ ast::{ Atom, Define, ExprKind, LambdaFunction, Let, List, Quote, STANDARD_MODULE_GET, @@ -17,6 +21,7 @@ use crate::{ interner::InternedString, parser::{RawSyntaxObject, SyntaxObject, SyntaxObjectId}, span::Span, + span_visitor::get_span, tokens::TokenType, }, steel_vm::primitives::MODULE_IDENTIFIERS, @@ -25,7 +30,7 @@ use crate::{ use super::{VisitorMutControlFlow, VisitorMutRefUnit, VisitorMutUnitRef}; -use fxhash::{FxHashMap, FxHasher}; +use fxhash::{FxHashMap, FxHashSet, FxHasher}; #[derive(Clone, Copy, Debug, PartialEq)] pub enum IdentifierStatus { @@ -63,6 +68,12 @@ pub struct SemanticInformation { pub heap_offset: Option, pub read_heap_offset: Option, pub is_shadowed: bool, + pub is_required_identifier: bool, +} + +#[test] +fn check_size_of_info() { + println!("{}", std::mem::size_of::()); } impl SemanticInformation { @@ -86,6 +97,7 @@ impl SemanticInformation { heap_offset: None, read_heap_offset: None, is_shadowed: false, + is_required_identifier: false, } } @@ -113,6 +125,10 @@ impl SemanticInformation { self.builtin = true; } + pub fn mark_required(&mut self) { + self.is_required_identifier = true; + } + pub fn with_offset(mut self, offset: usize) -> Self { self.stack_offset = Some(offset); self @@ -239,6 +255,14 @@ impl LetInformation { } } +#[derive(Debug, Clone)] +pub enum SemanticInformationType { + 
Variable(SemanticInformation), + Function(FunctionInformation), + CallSite(CallSiteInformation), + Let(LetInformation), +} + // Populate the metadata about individual #[derive(Default, Debug, Clone)] pub struct Analysis { @@ -250,6 +274,19 @@ pub struct Analysis { } impl Analysis { + // Reuse the analysis allocation through the process! + pub fn clear(&mut self) { + self.info.clear(); + self.function_info.clear(); + self.call_info.clear(); + self.let_info.clear(); + } + + pub fn fresh_from_exprs(&mut self, exprs: &[ExprKind]) { + self.clear(); + self.run(exprs); + } + pub fn from_exprs(exprs: &[ExprKind]) -> Self { let mut analysis = Analysis::default(); analysis.run(exprs); @@ -265,7 +302,7 @@ impl Analysis { .chain(self.let_info.values().flat_map(|x| x.arguments.values())) .filter(|x| x.captured && x.mutated) .map(|x| (x.id, x.clone())) - .collect::>(); + .collect::>(); self.function_info .values_mut() @@ -321,18 +358,24 @@ impl Analysis { semantic_info.mark_builtin(); } + if is_a_require_definition(define) { + semantic_info.mark_required(); + } + // If this variable name is already in scope, we should mark that this variable // shadows the previous id if let Some(shadowed_var) = scope.get(name) { semantic_info = semantic_info.shadows(shadowed_var.id) } - log::trace!("Defining global: {:?}", define.name); + // log::trace!("Defining global: {:?}", define.name); + // println!("Defining global: {}", define.name); define_var(scope, define); self.insert(define.name.atom_syntax_object().unwrap(), semantic_info); } + // TODO: This needs to just take an iterator? pub fn run(&mut self, exprs: &[ExprKind]) { let mut scope: ScopeMap = ScopeMap::new(); @@ -359,21 +402,44 @@ impl Analysis { for expr in exprs { let mut pass = AnalysisPass::new(self, &mut scope); - if let ExprKind::Define(define) = expr { - if define.body.lambda_function().is_some() { - // Since we're at the top level, care should be taken to actually - // refer to the defining context correctly - pass.defining_context = define.name_id(); - pass.defining_context_depth = 0; - // Continue with the rest of the body here - pass.visit(&define.body); - pass.defining_context = None; - } else { - pass.visit_top_level_define_value_without_body(define); - pass.visit(&define.body); + match expr { + ExprKind::Define(define) => { + if define.body.lambda_function().is_some() { + // Since we're at the top level, care should be taken to actually + // refer to the defining context correctly + pass.defining_context = define.name_id(); + pass.defining_context_depth = 0; + // Continue with the rest of the body here + pass.visit(&define.body); + pass.defining_context = None; + } else { + pass.visit_top_level_define_value_without_body(define); + pass.visit(&define.body); + } + } + ExprKind::Begin(b) => { + for expr in &b.exprs { + if let ExprKind::Define(define) = expr { + if define.body.lambda_function().is_some() { + // Since we're at the top level, care should be taken to actually + // refer to the defining context correctly + pass.defining_context = define.name_id(); + pass.defining_context_depth = 0; + // Continue with the rest of the body here + pass.visit(&define.body); + pass.defining_context = None; + } else { + pass.visit_top_level_define_value_without_body(define); + pass.visit(&define.body); + } + } else { + pass.visit(expr); + } + } + } + _ => { + pass.visit(expr); } - } else { - pass.visit(expr); } } } @@ -399,6 +465,7 @@ impl Analysis { existing.captured_from_enclosing = metadata.captured_from_enclosing; existing.heap_offset = 
metadata.heap_offset; existing.read_heap_offset = metadata.read_heap_offset; + existing.is_required_identifier = metadata.is_required_identifier; } pub fn get(&self, object: &SyntaxObject) -> Option<&SemanticInformation> { @@ -589,44 +656,30 @@ impl<'a> AnalysisPass<'a> { // If this variable name is already in scope, we should mark that this variable // shadows the previous id if let Some(shadowed_var) = self.scope.get(name) { - // println!("FOUND SHADOWED VAR: {}", name); - semantic_info = semantic_info.shadows(shadowed_var.id); if let Some(existing_analysis) = self.info.info.get_mut(&shadowed_var.id) { if existing_analysis.builtin { - // println!("FOUND A VALUE THAT SHADOWS AN EXISTING BUILTIN: {}", name); - existing_analysis.is_shadowed = true; } - // else { - // println!("DOES NOT SHADOW A BUILT IN: {}", name); - // } } } if is_a_builtin_definition(define) { - // println!("FOUND A BUILTIN: {}", name); - semantic_info.mark_builtin(); } - // if let Some(shadowed_var) = self.scope.get(name) { - // semantic_info = semantic_info.shadows(shadowed_var.id) - // } + // println!("Defining global: {}", define.name); + + if is_a_require_definition(define) { + semantic_info.mark_required(); + } if let Some(aliases) = define.is_an_alias_definition() { - log::debug!( - "Found definition that aliases - {} aliases {}: {:?} -> {:?}", - define.name, - define.body, - name_syntax_object.syntax_object_id, - define.body.atom_syntax_object().unwrap().syntax_object_id, - ); semantic_info = semantic_info.aliases_to(aliases); } - log::trace!("Defining global: {:?}", define.name); + // println!("Defining global: {}", define.name); define_var(self.scope, define); self.info.insert(name_syntax_object, semantic_info); @@ -654,10 +707,14 @@ impl<'a> AnalysisPass<'a> { semantic_info.mark_builtin(); } + if is_a_require_definition(define) { + semantic_info.mark_required(); + } + // If this variable name is already in scope, we should mark that this variable // shadows the previous id if let Some(shadowed_var) = self.scope.get(name) { - log::debug!("Redefining previous variable: {:?}", name); + // log::debug!("Redefining previous variable: {:?}", name); semantic_info = semantic_info.shadows(shadowed_var.id); } @@ -792,10 +849,9 @@ impl<'a> AnalysisPass<'a> { // TODO: merge this into one let count = arguments.get(ident).unwrap().usage_count; - if count == 0 { - // TODO: Emit warning with the span - log::debug!("Found unused argument: {:?}", ident); - } + // if count == 0 { + // log::debug!("Found unused argument: {:?}", ident); + // } semantic_info = semantic_info.with_usage_count(count); @@ -1425,12 +1481,14 @@ impl<'a> VisitorMutUnitRef<'a> for AnalysisPass<'a> { // } // var.refers - } else { - log::debug!("Unable to find var: {name} in info map to update to set!"); } - } else { - log::debug!("Variable not yet in scope: {name}"); + // else { + // log::debug!("Unable to find var: {name} in info map to update to set!"); + // } } + // else { + // log::debug!("Variable not yet in scope: {name}"); + // } } self.visit(&s.variable); @@ -1512,7 +1570,7 @@ impl<'a> VisitorMutUnitRef<'a> for AnalysisPass<'a> { if let Some(stack_offset) = mut_ref.stack_offset { semantic_info = semantic_info.with_offset(stack_offset); } else { - log::debug!("Stack offset missing from local define") + // log::debug!("Stack offset missing from local define") } if mut_ref.captured && mut_ref.mutated { @@ -1594,14 +1652,14 @@ impl<'a> VisitorMutUnitRef<'a> for AnalysisPass<'a> { // semantic_info = semantic_info.with_heap_offset(heap_offset); 
semantic_info = semantic_info.with_read_heap_offset(heap_offset); } else { - log::debug!("Stack offset missing from local define") + // log::debug!("Stack offset missing from local define") } if let Some(heap_offset) = captured.heap_offset { // semantic_info = semantic_info.with_heap_offset(heap_offset); semantic_info = semantic_info.with_heap_offset(heap_offset); } else { - log::debug!("Stack offset missing from local define") + // log::debug!("Stack offset missing from local define") } // if semantic_info.kind == IdentifierStatus::HeapAllocated @@ -1655,7 +1713,7 @@ impl<'a> VisitorMutUnitRef<'a> for AnalysisPass<'a> { if let Some(stack_offset) = is_captured.stack_offset { semantic_info = semantic_info.with_offset(stack_offset); } else { - log::debug!("Stack offset missing from local define") + // log::debug!("Stack offset missing from local define") } // println!("Variable {} refers to {}", ident, is_captured.id); @@ -1683,7 +1741,7 @@ impl<'a> VisitorMutUnitRef<'a> for AnalysisPass<'a> { // Otherwise, we've hit a free variable at this point // TODO: WE don't need to do this? - self.info.insert(&a.syn, semantic_info); + self.info.info.insert(a.syn.syntax_object_id, semantic_info); } // let mut semantic_info = @@ -1735,17 +1793,80 @@ impl<'a> VisitorMutUnitRef<'a> for Analysis { } } +pub fn query_top_level_define_on_condition>( + exprs: &[ExprKind], + name: A, + mut func: impl FnMut(&str, &str) -> bool, +) -> Option<&crate::parser::ast::Define> { + let mut found_defines = Vec::new(); + for expr in exprs { + log::debug!("{}", expr); + + match expr { + ExprKind::Define(d) => match d.name.atom_identifier() { + Some(n) if func(name.as_ref(), n.resolve()) => found_defines.push(d.as_ref()), + _ => {} + }, + + ExprKind::Begin(b) => { + for expr in b.exprs.iter() { + if let ExprKind::Define(d) = expr { + match d.name.atom_identifier() { + Some(n) if func(name.as_ref(), n.resolve()) => { + found_defines.push(d.as_ref()) + } + _ => {} + } + } + } + } + + _ => {} + } + } + + if found_defines.len() > 1 { + log::debug!( + "Multiple defines found, unable to find one unique value to associate with a name" + ); + return None; + } + + if found_defines.len() == 1 { + return found_defines.into_iter().next(); + } + + None +} + pub fn query_top_level_define>( exprs: &[ExprKind], name: A, ) -> Option<&crate::parser::ast::Define> { let mut found_defines = Vec::new(); for expr in exprs { - if let ExprKind::Define(d) = expr { - match d.name.atom_identifier() { + log::debug!("{}", expr); + + match expr { + ExprKind::Define(d) => match d.name.atom_identifier() { Some(n) if name.as_ref() == n.resolve() => found_defines.push(d.as_ref()), _ => {} + }, + + ExprKind::Begin(b) => { + for expr in b.exprs.iter() { + if let ExprKind::Define(d) = expr { + match d.name.atom_identifier() { + Some(n) if name.as_ref() == n.resolve() => { + found_defines.push(d.as_ref()) + } + _ => {} + } + } + } } + + _ => {} } } @@ -2013,7 +2134,7 @@ where if let ExprKind::Let(_) = &let_expr { if (self.func)(self.analysis, let_expr) { - log::debug!("Modified let expression"); + // log::debug!("Modified let expression"); } } } @@ -2065,7 +2186,7 @@ where // In the state of the analysis if (self.func)(self.analysis, list) { // return self.visit(list); - log::debug!("Modified anonymous function call site!"); + // log::debug!("Modified anonymous function call site!"); } } } @@ -2108,6 +2229,19 @@ pub(crate) fn is_a_builtin_definition(def: &Define) -> bool { false } +pub(crate) fn is_a_require_definition(def: &Define) -> bool { + if let 
ExprKind::List(l) = &def.body { + match l.first_ident() { + Some(func) if *func == *PROTO_HASH_GET => { + return true; + } + _ => {} + } + } + + false +} + impl<'a> VisitorMutRefUnit for RemoveUnusedDefineImports<'a> { fn visit_lambda_function(&mut self, lambda_function: &mut LambdaFunction) { self.depth += 1; @@ -2221,9 +2355,117 @@ impl<'a> VisitorMutRefUnit for RemovedUnusedImports<'a> { } } +struct FreeIdentifierVisitor<'a> { + analysis: &'a Analysis, + // Check if identifiers is in the globals list before deciding to reject it + globals: &'a SymbolMap, + + diagnostics: Vec<(InternedString, &'a SemanticInformation)>, +} + +impl<'a> VisitorMutUnitRef<'a> for FreeIdentifierVisitor<'a> { + fn visit_atom(&mut self, a: &'a Atom) { + if let Some(info) = self.analysis.get(&a.syn) { + if info.kind == IdentifierStatus::Free { + if let Some(ident) = a.ident() { + if self.globals.get(ident).is_err() { + self.diagnostics.push((*ident, info)); + } + } + } + } + } +} + +// TODO: Don't need the analysis at all +struct IdentifierFinder<'a> { + ids: &'a mut HashMap>, +} + +impl<'a> VisitorMutUnitRef<'a> for IdentifierFinder<'a> { + fn visit_atom(&mut self, a: &'a Atom) { + if let Some(ident) = a.ident() { + self.ids + .get_mut(&a.syn.syntax_object_id) + .map(|value| *value = Some(*ident)); + } + } +} + +struct FindContextsWithOffset<'a> { + analysis: &'a Analysis, + offset: usize, + source_id: SourceId, + contexts: Vec, +} + +impl<'a> VisitorMutUnitRef<'a> for FindContextsWithOffset<'a> { + fn visit_lambda_function(&mut self, lambda_function: &'a LambdaFunction) { + if lambda_function.location.span.end >= self.offset { + return; + } + + let mut span = get_span(&lambda_function.body); + + span.start = lambda_function.location.span.start; + + log::debug!("Lambda function span: {:?} - offset: {}", span, self.offset); + + // This counts, save analysis. 
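+            // Rough sketch of the check below (offsets are illustrative): `span` now covers the
+            // whole lambda form, from the opening keyword through the end of its body, so for a
+            // form occupying byte offsets 100..180 a query offset of 150 records this function
+            // as a context and then recurses into the body to find tighter enclosing contexts.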
+ // TODO: Memoize the span analysis, this is not performant + if span.range().contains(&self.offset) { + // TODO: Don't clone this + if let Some(info) = self.analysis.get_function_info(lambda_function) { + if lambda_function.location.span.source_id == Some(self.source_id) { + self.contexts + .push(SemanticInformationType::Function(info.clone())); + } + } + + self.visit(&lambda_function.body); + } + } + + fn visit_let(&mut self, l: &'a Let) { + if l.location.span.end >= self.offset { + return; + } + + let mut span = get_span(&l.body_expr); + span.start = l.location.span.start; + + if span.range().contains(&self.offset) { + if let Some(info) = self.analysis.let_info.get(&l.syntax_object_id) { + if l.location.span.source_id() == Some(self.source_id) { + self.contexts + .push(SemanticInformationType::Let(info.clone())); + } + } + + l.bindings.iter().for_each(|x| self.visit(&x.1)); + self.visit(&l.body_expr); + } + } +} + +struct GlobalDefinitionFinder<'a> { + analysis: &'a Analysis, + globals: Vec<(InternedString, Span)>, +} + +impl<'a> VisitorMutUnitRef<'a> for GlobalDefinitionFinder<'a> { + fn visit_atom(&mut self, a: &'a Atom) { + if let Some(info) = self.analysis.get(&a.syn) { + if info.kind == IdentifierStatus::Global { + self.globals.push((*a.ident().unwrap(), info.span)); + } + } + } +} + struct UnusedArguments<'a> { analysis: &'a Analysis, - unused_args: Vec, + unused_args: Vec<(InternedString, Span)>, } impl<'a> UnusedArguments<'a> { @@ -2240,9 +2482,9 @@ impl<'a> VisitorMutUnitRef<'a> for UnusedArguments<'a> { for arg in &lambda_function.args { if let Some(syntax_object) = arg.atom_syntax_object() { if let Some(info) = self.analysis.get(syntax_object) { - // println!("Ident: {}, Info: {:?}", arg, info); if info.usage_count == 0 { - self.unused_args.push(syntax_object.span); + self.unused_args + .push((*arg.atom_identifier().unwrap(), syntax_object.span)); } } } @@ -2632,9 +2874,9 @@ impl<'a> VisitorMutRefUnit for LiftLocallyDefinedFunctions<'a> { ); } - for (var, _) in info.captured_vars() { - log::debug!(target: "lambda-lifting", "{}", var.resolve()); - } + // for (var, _) in info.captured_vars() { + // log::debug!(target: "lambda-lifting", "{}", var.resolve()); + // } if info.captured_vars().len() == 1 { // TODO: Check if the number of captured vars is 1, and if that 1 is equivalent to the @@ -2918,7 +3160,7 @@ impl<'a> VisitorMutRefUnit for FlattenAnonymousFunctionCalls<'a> { struct FunctionCallCollector<'a> { analysis: &'a Analysis, - functions: HashMap>, + functions: FxHashMap>, black_box: InternedString, context: Option, constants: ImmutableHashMap, @@ -2931,12 +3173,12 @@ impl<'a> FunctionCallCollector<'a> { exprs: &mut Vec, constants: ImmutableHashMap, should_mangle: bool, - ) -> HashMap> { + ) -> FxHashMap> { let black_box: InternedString = "#%black-box".into(); let mut collector = Self { analysis, - functions: HashMap::new(), + functions: FxHashMap::default(), context: None, black_box, constants, @@ -3077,7 +3319,17 @@ pub struct SemanticAnalysis<'a> { pub(crate) analysis: Analysis, } +pub enum RequiredIdentifierInformation<'a> { + Resolved(&'a SemanticInformation), + // Raw Identifier, Full path + Unresolved(InternedString, String), +} + impl<'a> SemanticAnalysis<'a> { + pub fn into_analysis(self) -> Analysis { + self.analysis + } + pub fn from_analysis(exprs: &'a mut Vec, analysis: Analysis) -> Self { Self { exprs, analysis } } @@ -3095,6 +3347,100 @@ impl<'a> SemanticAnalysis<'a> { self.analysis.get(object) } + pub fn get_identifier(&self, identifier: SyntaxObjectId) -> 
Option<&SemanticInformation> { + self.analysis.info.get(&identifier) + } + + // Syntax object must be the id associated with a given require define statement + pub fn resolve_required_identifier( + &self, + identifier: SyntaxObjectId, + ) -> Option> { + for expr in self.exprs.iter() { + match expr { + ExprKind::Define(d) => { + if is_a_require_definition(&d) && d.name_id() == Some(identifier) { + let module = d + .body + .list() + .and_then(|x| x.get(1)) + .and_then(|x| x.atom_identifier()) + .map(|x| x.resolve())?; + + let prefix = module.trim_start_matches("__module-").to_string() + + d.name.atom_identifier()?.resolve(); + + log::debug!("Searching for {}", prefix); + + let top_level_define = self.query_top_level_define(&prefix); + + match top_level_define { + Some(top_level_define) => { + log::debug!("Found: {}", top_level_define); + log::debug!("Span: {:?}", top_level_define.location.span); + log::debug!("Body span: {:?}", get_span(&top_level_define.body)); + + return self + .get_identifier(top_level_define.name_id()?) + .map(RequiredIdentifierInformation::Resolved); + } + None => { + return Some(RequiredIdentifierInformation::Unresolved( + *d.name.atom_identifier()?, + prefix, + )) + } + } + } + } + ExprKind::Begin(b) => { + for expr in &b.exprs { + if let ExprKind::Define(d) = expr { + if is_a_require_definition(&d) && d.name_id() == Some(identifier) { + let module = d + .body + .list() + .and_then(|x| x.get(1)) + .and_then(|x| x.atom_identifier()) + .map(|x| x.resolve())?; + + let prefix = module.trim_start_matches("__module-").to_string() + + d.name.atom_identifier()?.resolve(); + + log::debug!("Searching for {}", prefix); + let top_level_define = self.query_top_level_define(&prefix); + + match top_level_define { + Some(top_level_define) => { + log::debug!("Found: {}", top_level_define); + log::debug!("Span: {:?}", top_level_define.location.span); + log::debug!( + "Body span: {:?}", + get_span(&top_level_define.body) + ); + + return self + .get_identifier(top_level_define.name_id()?) 
+ .map(RequiredIdentifierInformation::Resolved); + } + None => { + return Some(RequiredIdentifierInformation::Unresolved( + *d.name.atom_identifier()?, + prefix, + )) + } + } + } + } + } + } + _ => {} + } + } + + None + } + pub fn query_top_level_define>( &self, name: A, @@ -3182,8 +3528,8 @@ impl<'a> SemanticAnalysis<'a> { *self.exprs = lifter.lifted_functions; - log::debug!("Re-running the analysis after lifting local functions"); - self.analysis = Analysis::from_exprs(self.exprs); + // log::debug!("Re-running the analysis after lifting local functions"); + self.analysis.fresh_from_exprs(&self.exprs); self.analysis.populate_captures(self.exprs); } @@ -3196,10 +3542,6 @@ impl<'a> SemanticAnalysis<'a> { module_manager: &mut ModuleManager, table: &mut HashSet, ) -> &mut Self { - // for identifier in table.iter() { - // println!("Table => {}", identifier); - // } - let mut replacer = ReplaceBuiltinUsagesWithReservedPrimitiveReferences::new(&self.analysis, table); @@ -3207,10 +3549,6 @@ impl<'a> SemanticAnalysis<'a> { replacer.visit(expr); } - // for identifier in replacer.identifiers_to_replace.iter() { - // println!("Replaced => {}", identifier); - // } - let mut macro_replacer = ReplaceBuiltinUsagesInsideMacros { identifiers_to_replace: replacer.identifiers_to_replace, analysis: &self.analysis, @@ -3242,9 +3580,7 @@ impl<'a> SemanticAnalysis<'a> { macro_replacer.visit(expr); } - self.analysis = Analysis::from_exprs(self.exprs); - - // replace.vi + self.analysis.fresh_from_exprs(self.exprs); self } @@ -3391,9 +3727,9 @@ impl<'a> SemanticAnalysis<'a> { // self.exprs.push(ExprKind::ident("void")); - log::debug!("Re-running the semantic analysis after removing unused globals"); + // log::debug!("Re-running the semantic analysis after removing unused globals"); - self.analysis = Analysis::from_exprs(self.exprs); + self.analysis.fresh_from_exprs(self.exprs); self } @@ -3435,9 +3771,9 @@ impl<'a> SemanticAnalysis<'a> { self.find_let_call_sites_and_mutate_with(func); if re_run_analysis { - log::debug!("Re-running the semantic analysis after modifying let call sites"); + // log::debug!("Re-running the semantic analysis after modifying let call sites"); - self.analysis = Analysis::from_exprs(self.exprs); + self.analysis.fresh_from_exprs(self.exprs); } self @@ -3447,7 +3783,7 @@ impl<'a> SemanticAnalysis<'a> { &mut self, constants: ImmutableHashMap, should_mangle: bool, - ) -> HashMap> { + ) -> FxHashMap> { let map = FunctionCallCollector::mangle( &self.analysis, &mut self.exprs, @@ -3460,7 +3796,7 @@ impl<'a> SemanticAnalysis<'a> { .iter() .filter(|(_, v)| v.is_empty()) .map(|x| x.0.clone()) - .collect::>(); + .collect::>(); // Only constant evaluatable functions should be ones that references _other_ const functions map.into_iter() @@ -3496,7 +3832,7 @@ impl<'a> SemanticAnalysis<'a> { let analysis = analysis.get_function_info(f).unwrap(); if analysis.captured_vars.is_empty() { - log::debug!("Found a function that does not capture variables"); + // log::debug!("Found a function that does not capture variables"); if f.args.is_empty() && arg_count == 0 { // Take out the body of the function - we're going to want to use that now @@ -3523,9 +3859,10 @@ impl<'a> SemanticAnalysis<'a> { self.find_anonymous_function_calls_and_mutate_with(func); if re_run_analysis { - log::debug!("Re-running the semantic analysis after modifications"); + // log::debug!("Re-running the semantic analysis after modifications"); - self.analysis = Analysis::from_exprs(self.exprs); + // self.analysis = 
Analysis::from_exprs(self.exprs); + self.analysis.fresh_from_exprs(self.exprs); } self @@ -3563,7 +3900,7 @@ impl<'a> SemanticAnalysis<'a> { *anon = ExprKind::Let(let_expr.into()); re_run_analysis = true; - log::debug!("Replaced anonymous function call with let"); + // log::debug!("Replaced anonymous function call with let"); true } else { @@ -3579,9 +3916,9 @@ impl<'a> SemanticAnalysis<'a> { self.find_anonymous_function_calls_and_mutate_with(func); if re_run_analysis { - log::debug!("Re-running the semantic analysis after modifications"); + // log::debug!("Re-running the semantic analysis after modifications"); - self.analysis = Analysis::from_exprs(self.exprs); + self.analysis.fresh_from_exprs(self.exprs); } self @@ -3592,7 +3929,7 @@ impl<'a> SemanticAnalysis<'a> { RefreshVars.visit(expr); } - self.analysis = Analysis::from_exprs(self.exprs); + self.analysis.fresh_from_exprs(self.exprs); self } @@ -3646,6 +3983,23 @@ impl<'a> SemanticAnalysis<'a> { .filter(|x| x.kind == IdentifierStatus::Free) } + pub fn free_identifiers_with_globals<'b: 'a>( + &self, + globals: &'b SymbolMap, + ) -> Vec<(InternedString, &'_ SemanticInformation)> { + let mut visitor = FreeIdentifierVisitor { + analysis: &self.analysis, + globals, + diagnostics: Vec::new(), + }; + + for expr in self.exprs.iter() { + visitor.visit(expr); + } + + visitor.diagnostics + } + pub fn unused_variables(&self) -> impl Iterator { self.analysis.info.values().filter(|x| { x.usage_count == 0 @@ -3653,6 +4007,13 @@ impl<'a> SemanticAnalysis<'a> { }) } + pub fn unused_local_variables(&self) -> impl Iterator { + self.analysis + .info + .values() + .filter(|x| x.usage_count == 0 && matches!(x.kind, IdentifierStatus::Local)) + } + pub fn global_defs(&self) -> impl Iterator { self.analysis .info @@ -3660,6 +4021,19 @@ impl<'a> SemanticAnalysis<'a> { .filter(|x| x.kind == IdentifierStatus::Global) } + pub fn find_global_defs(&self) -> Vec<(InternedString, Span)> { + let mut global_finder = GlobalDefinitionFinder { + analysis: &self.analysis, + globals: Vec::new(), + }; + + for expr in self.exprs.iter() { + global_finder.visit(expr); + } + + global_finder.globals + } + pub fn built_ins(&self) -> impl Iterator { self.analysis.info.values().filter(|x| x.builtin) } @@ -3671,7 +4045,7 @@ impl<'a> SemanticAnalysis<'a> { .filter(|x| x.kind == IdentifierStatus::Free) } - pub fn find_unused_arguments(&self) -> Vec { + pub fn find_unused_arguments(&self) -> Vec<(InternedString, Span)> { let mut unused = UnusedArguments::new(&self.analysis); for expr in self.exprs.iter() { @@ -3681,6 +4055,52 @@ impl<'a> SemanticAnalysis<'a> { unused.unused_args } + pub fn find_identifier_at_offset( + &self, + offset: usize, + source_id: SourceId, + ) -> Option<(&SyntaxObjectId, &SemanticInformation)> { + self.analysis.info.iter().find(|(_, x)| { + x.span.range().contains(&offset) && x.span.source_id() == Some(source_id) + }) + } + + // Find semantic context where this offset exist. + pub fn find_contexts_with_offset( + &self, + offset: usize, + source_id: SourceId, + ) -> Vec { + let mut context_finder = FindContextsWithOffset { + analysis: &self.analysis, + offset, + contexts: Vec::new(), + source_id, + }; + + for expr in self.exprs.iter() { + context_finder.visit(expr); + } + + context_finder.contexts + } + + // Convert the syntax object ids back to interned strings. Could end up + // returning nothing if the ids are not found in the target AST, which could + // happen if the analysis gets invalidated by refreshing the vars. 
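+    //
+    // Rough usage sketch; the local names are illustrative and the map is assumed to be keyed
+    // by SyntaxObjectId with Option<InternedString> values, matching IdentifierFinder above:
+    //
+    //     let mut ids: HashMap<SyntaxObjectId, Option<InternedString>> =
+    //         candidate_ids.into_iter().map(|id| (id, None)).collect();
+    //     analysis.syntax_object_ids_to_identifiers(&mut ids);
+    //     // ids found in the current AST now map to Some(identifier); the rest stay None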
+ pub fn syntax_object_ids_to_identifiers<'b>( + &self, + ids: &'a mut HashMap>, + ) -> &mut HashMap> { + let mut identifier_finder = IdentifierFinder { ids }; + + for expr in self.exprs.iter() { + identifier_finder.visit(expr); + } + + identifier_finder.ids + } + /// Converts function applications of the form: /// /// ((lambda (x) (global-function-call x)) foo) @@ -3702,11 +4122,11 @@ impl<'a> SemanticAnalysis<'a> { } if elider.re_run_analysis { - log::debug!( - "Re-running the semantic analysis after modifications during lambda lifting" - ); + // log::debug!( + // "Re-running the semantic analysis after modifications during lambda lifting" + // ); - self.analysis = Analysis::from_exprs(self.exprs); + self.analysis.fresh_from_exprs(self.exprs); self.analysis.populate_captures(self.exprs); } @@ -3739,7 +4159,7 @@ impl<'a> SemanticAnalysis<'a> { .iter() .map(|x| { if let ExprKind::Define(d) = x { - log::debug!("Found a local function to lift: {}", d.name); + // log::debug!("Found a local function to lift: {}", d.name); d.name.atom_syntax_object().unwrap().syntax_object_id } else { unreachable!() @@ -3822,12 +4242,13 @@ impl<'a> SemanticAnalysis<'a> { *self.exprs = overall_lifted; if re_run_analysis { - log::debug!( - "Re-running the semantic analysis after modifications during lambda lifting" - ); + // log::debug!( + // "Re-running the semantic analysis after modifications during lambda lifting" + // ); - self.analysis = Analysis::from_exprs(self.exprs); - self.analysis.populate_captures(self.exprs); + self.analysis.fresh_from_exprs(self.exprs); + // = Analysis::from_exprs(self.exprs); + // self.analysis.populate_captures(self.exprs); } self @@ -3866,6 +4287,475 @@ mod analysis_pass_tests { use super::*; + #[test] + fn test_unused_arguments() { + let script = r#" + +(define ##lambda-lifting##loop119067 + (λ (port sum) + (%plain-let + ((next-line (read-line-from-port port))) + (if (equal? (quote eof) next-line) + sum + (##lambda-lifting##loop119067 + port + (+ sum + (%plain-let ((digits (filter char-digit? next-line))) + (%plain-let ((first-digit (first digits)) (second-digit (last digits))) + (string->number (list->string (list first-digit + second-digit))))))))))) + +(define ##lambda-lifting##trie-contains-inner?119977 + (λ (node char-list) + (if (empty? char-list) + #true + (if (char=? (trie-char node) (car char-list)) + (%plain-let ((children-matched + (map (λ (node4) + (##lambda-lifting##trie-contains-inner?119977 node4 (cdr char-list))) + (trie-children node)))) + (if (empty? children-matched) #true (list? (member #true children-matched)))) + #false)))) + +(define ##lambda-lifting##loop120600 + (λ (port sum) + (%plain-let ((next-line (read-line-from-port port))) + (if (equal? 
(quote eof) next-line) + sum + (##lambda-lifting##loop120600 port (+ sum (process-line next-line))))))) + +(define scan + (λ (path) (%plain-let ((file (open-input-file path))) (##lambda-lifting##loop119067 file 0)))) + +(displayln (scan "aoc/day1.data")) + +(define word-map + (transduce (list (cons "one" "1") + (cons "two" "2") + (cons "three" "3") + (cons "four" "4") + (cons "five" "5") + (cons "six" "6") + (cons "seven" "7") + (cons "eight" "8") + (cons "nine" "9") + (cons "1" "1") + (cons "2" "2") + (cons "3" "3") + (cons "4" "4") + (cons "5" "5") + (cons "6" "6") + (cons "7" "7") + (cons "8" "8") + (cons "9" "9")) + (mapping (λ (pair) (cons (string->list (car pair)) (cadr pair)))) + (into-hashmap))) + +(define sample + (map + symbol->string + (quote + (two1nine eightwothree abcone2threexyz xtwone3four 4nineeightseven2 zoneight234 7pqrstsixteen)))) + +(displayln sample) + +(begin + (define ___trie-options___ + (hash (quote #:transparent) + #true + (quote #:name) + (quote trie) + (quote #:fields) + (quote (char children end-word? word-up-to)) + (quote #:printer) + (λ (obj printer-function) + (begin + (display "(") + (display (symbol->string (quote trie))) + (display " ") + (printer-function (trie-char obj)) + (display " ") + (printer-function (trie-children obj)) + (display " ") + (printer-function (trie-end-word? obj)) + (display " ") + (printer-function (trie-word-up-to obj)) + (display ")"))) + (quote #:mutable) + #false)) + (define trie (quote unintialized)) + (define struct:trie (quote uninitialized)) + (define trie? (quote uninitialized)) + (define trie-char (quote uninitialized)) + (define trie-children (quote uninitialized)) + (define trie-end-word? (quote uninitialized)) + (define trie-word-up-to (quote uninitialized)) + (%plain-let ((prototypes (make-struct-type (quote trie) 4))) + (%plain-let ((struct-type-descriptor (list-ref prototypes 0)) + (constructor-proto (list-ref prototypes 1)) + (predicate-proto (list-ref prototypes 2)) + (getter-proto (list-ref prototypes 3))) + (begin + (set! struct:trie struct-type-descriptor) + (#%vtable-update-entry! struct-type-descriptor #false ___trie-options___) + (set! trie constructor-proto) + (set! trie? predicate-proto) + (set! trie-char (λ (this) (getter-proto this 0))) + (set! trie-children (λ (this) (getter-proto this 1))) + (set! trie-end-word? (λ (this) (getter-proto this 2))) + (set! trie-word-up-to (λ (this) (getter-proto this 3))) + void)))) + +(define empty (quote ())) + +(define empty-trie (trie void empty #false empty)) + +(define flatten + (λ (lst) + (if (null? lst) + empty + (if (list? lst) (append (flatten (car lst)) (flatten (cdr lst))) (list lst))))) + +(define create-children + (λ (char-list lst prefix-chars) + (if (= (length char-list) 1) + (handle-last-letter char-list lst prefix-chars) + (handle-intern-letter char-list lst prefix-chars)))) + +(define handle-last-letter + (λ (char-list lst prefix-chars) + (%plain-let + ((char (first char-list))) + (%plain-let + ((next-prefix (push-back prefix-chars char))) + (if (empty? lst) + (list (trie char empty #true next-prefix)) + (if (< char (trie-char (first lst))) + (cons (trie char empty #true next-prefix) lst) + (if (equal? 
char (trie-char (first lst))) + (cons (trie char (trie-children (first lst)) #true next-prefix) (rest lst)) + (cons (first lst) (create-children char-list (rest lst) prefix-chars))))))))) + +(define handle-intern-letter + (λ (char-list lst prefix-chars) + (%plain-let + ((char (first char-list))) + (%plain-let + ((next-prefix (push-back prefix-chars char))) + (if (empty? lst) + (list (trie char (create-children (rest char-list) empty next-prefix) #false next-prefix)) + (if (< char (trie-char (first lst))) + (cons + (trie char (create-children (rest char-list) empty next-prefix) #false next-prefix) + lst) + (if (equal? char (trie-char (first lst))) + (cons + (trie char + (create-children (rest char-list) (trie-children (first lst)) next-prefix) + (trie-end-word? (first lst)) + (trie-word-up-to (first lst))) + (rest lst)) + (cons (first lst) (create-children char-list (rest lst) prefix-chars))))))))) + +(define insert + (λ (root-trie word) + (%plain-let ((char-list (string->list word))) + (trie (trie-char root-trie) + (create-children char-list (trie-children root-trie) empty) + (trie-end-word? root-trie) + (trie-word-up-to root-trie))))) + +(define triestring (trie-word-up-to trie-node)) + (flatten (map pre-order (trie-children trie-node)))) + (flatten (map pre-order (trie-children trie-node)))))) + +(define trie-contains-prefix? + (λ (root word) + (%plain-let + ((root-word-char-list (if (string? word) (string->list word) word))) + (list? (member #true + (map (λ (node) + (##lambda-lifting##trie-contains-inner?119977 node root-word-char-list)) + (trie-children root))))))) + +(define my-trie + (build-trie-from-list-of-words empty-trie + (quote ("one" "two" + "three" + "four" + "five" + "six" + "seven" + "eight" + "nine" + "1" + "2" + "3" + "4" + "5" + "6" + "7" + "8" + "9")))) + +(define check-slices + (λ (trie-root word-map line) + (%plain-let + ((line-length (length line)) (loop 123)) + (%plain-let + ((loop4 (#%box loop))) + (%plain-let ((_____loop0 (λ (offset amount accum) + (if (> (+ offset amount) line-length) + (reverse accum) + (if (trie-contains-prefix? trie-root (slice line offset amount)) + (if (hash-contains? word-map (slice line offset amount)) + ((#%unbox loop4) + (+ offset 1) + 1 + (cons (hash-get word-map (slice line offset amount)) + accum)) + ((#%unbox loop4) offset (+ 1 amount) accum)) + ((#%unbox loop4) (+ 1 offset) 1 accum)))))) + (begin + (#%set-box! loop4 _____loop0) + ((#%unbox loop4) 0 1 (quote ())))))))) + +(define process-line + (λ (line) + (%plain-let ((result (check-slices my-trie word-map (string->list line)))) + (string->number (apply string-append (list (first result) (last result))))))) + +(define scan2 + (λ (path) (%plain-let ((file (open-input-file path))) (##lambda-lifting##loop120600 file 0)))) + +(displayln (scan2 "aoc/day1.data")) + "#; + + let mut exprs = Parser::parse(script).unwrap(); + let analysis = SemanticAnalysis::new(&mut exprs); + // analysis.replace_pure_empty_lets_with_body(); + + // Log the free identifiers + let free_vars = analysis.find_unused_arguments(); + + for var in free_vars { + crate::rerrs::report_info( + ErrorKind::FreeIdentifier.to_error_code(), + "input.rkt", + script, + "unused arguments".to_string(), + var.1, + ); + } + } + + #[test] + fn test_free_identifiers() { + let script = r#" + +(begin + (define ___mcons-options___ + (hash + (quote + #:fields) + (quote + (mcar mcdr)) + (quote + #:transparent) + #false + (quote + #:name) + (quote + mcons) + (quote + #:printer) + (λ (obj printer) + (if (mlist? 
obj) + (begin + (simple-display "'") + (printer (mcons->list obj))) + (begin + (simple-display "'(") + (printer (mcons-mcar obj)) + (simple-display " . ") + (printer (mcons-mcdr obj)) + (simple-display ")")))) + (quote + #:mutable) + #true)) + (define mcons + (quote + unintialized)) + (define struct:mcons + (quote + uninitialized)) + (define mcons? + (quote + uninitialized)) + (define mcons-mcar + (quote + uninitialized)) + (define mcons-mcdr + (quote + uninitialized)) + (define set-mcons-mcar! + (quote + unintialized)) + (define set-mcons-mcdr! + (quote + unintialized)) + (%plain-let ((prototypes (make-struct-type + (quote + mcons) + 2))) + (%plain-let ((struct-type-descriptor (list-ref + prototypes + 0)) + (constructor-proto (list-ref prototypes 1)) + (predicate-proto (list-ref prototypes 2)) + (getter-proto (list-ref prototypes 3))) + (begin + (set! struct:mcons struct-type-descriptor) + (#%vtable-update-entry! + struct-type-descriptor + #false + ___mcons-options___) + (set! mcons + (λ (mcar mcdr) + (constructor-proto + (#%box mcar) + (#%box mcdr)))) + (set! mcons? predicate-proto) + (set! mcons-mcar + (λ (this) + (#%unbox (getter-proto this 0)))) + (set! mcons-mcdr + (λ (this) + (#%unbox (getter-proto this 1)))) + (set! set-mcons-mcar! + (λ (this value) + (#%set-box! + (getter-proto this 0) + value))) + (set! set-mcons-mcdr! + (λ (this value) + (#%set-box! + (getter-proto this 1) + value))) + void)))) + +(define ##lambda-lifting##loop118915 + (λ (mutable-cons3 builder) + (if (not + (mcons? + (%plain-let ((result (mcons-mcdr + mutable-cons3))) + (begin + (simple-display + (quote + (mcons-mcdr mutable-cons))) + (simple-display " = ") + (simple-displayln result) + result)))) + (#%prim.cons (mcons-mcar mutable-cons3) builder) + (##lambda-lifting##loop118915 + (mcons-mcdr mutable-cons3) + (#%prim.cons + (mcons-mcar mutable-cons3) + builder))))) + +(define set-car! + set-mcons-mcar!) + +(define set-cdr! + set-mcons-mcdr!) + +(define mcons->list + (λ (mutable-cons) + (reverse + (##lambda-lifting##loop118915 + mutable-cons + (quote + ()))))) + +(define mlist? + (λ (cell) + (%plain-let ((next (mcons-mcdr cell))) + (%plain-let ((z (mcons? next))) + (if z z (null? next)))))) + +(define pair? + (λ (x) + (%plain-let ((z (mcons? x))) + (if z z (#%prim.pair? x))))) + +(define cons + (λ (a b !!dummy-rest-arg!!) + (%plain-let ((!!dummy-rest-arg!!3 (apply + %keyword-hash + !!dummy-rest-arg!!))) + (%plain-let ((mutable (%plain-let ((mutable (hash-try-get + !!dummy-rest-arg!!3 + (quote + #:mutable)))) + (if (hash-contains? + !!dummy-rest-arg!!3 + (quote + #:mutable)) + mutable + #false)))) + (if mutable + (mcons a b) + (if (list? b) + (#%prim.cons a b) + (if (mcons? b) + (mcons a b) + (#%prim.cons a b)))))))) + +(define car + (λ (a) + (if (mcons? a) (mcons-mcar a) (#%prim.car a)))) + +(define cdr + (λ (a) + (if (mcons? 
a) (mcons-mcdr a) (#%prim.cdr a)))) + + "#; + + let mut exprs = Parser::parse(script).unwrap(); + let mut analysis = SemanticAnalysis::new(&mut exprs); + analysis.replace_pure_empty_lets_with_body(); + + // Log the free identifiers + let free_vars = analysis.find_free_identifiers(); + + for var in free_vars { + crate::rerrs::report_info( + ErrorKind::FreeIdentifier.to_error_code(), + "input.rkt", + script, + "Free identifier".to_string(), + var.span, + ); + } + } + #[test] fn local_defines() { let script = r#" diff --git a/crates/steel-core/src/compiler/passes/mangle.rs b/crates/steel-core/src/compiler/passes/mangle.rs index 212b5a822..58e39d0ac 100644 --- a/crates/steel-core/src/compiler/passes/mangle.rs +++ b/crates/steel-core/src/compiler/passes/mangle.rs @@ -79,7 +79,7 @@ impl<'a> VisitorMutRefUnit for NameUnMangler<'a> { } pub struct NameMangler { - globals: HashSet, + pub(crate) globals: HashSet, prefix: String, } diff --git a/crates/steel-core/src/compiler/passes/mod.rs b/crates/steel-core/src/compiler/passes/mod.rs index 60053977e..fedff42c6 100644 --- a/crates/steel-core/src/compiler/passes/mod.rs +++ b/crates/steel-core/src/compiler/passes/mod.rs @@ -96,7 +96,7 @@ pub trait Folder { } #[inline] - fn visit_macro(&mut self, m: Macro) -> ExprKind { + fn visit_macro(&mut self, m: Box) -> ExprKind { ExprKind::Macro(m) } @@ -112,7 +112,7 @@ pub trait Folder { } #[inline] - fn visit_syntax_rules(&mut self, l: SyntaxRules) -> ExprKind { + fn visit_syntax_rules(&mut self, l: Box) -> ExprKind { ExprKind::SyntaxRules(l) } diff --git a/crates/steel-core/src/compiler/passes/shadow.rs b/crates/steel-core/src/compiler/passes/shadow.rs index e059c6c1c..da9ab7417 100644 --- a/crates/steel-core/src/compiler/passes/shadow.rs +++ b/crates/steel-core/src/compiler/passes/shadow.rs @@ -36,22 +36,6 @@ impl RenameShadowedVariables { pub fn rename_shadowed_vars(exprs: &mut [ExprKind]) { let mut renamer = Self::new(); - // for expr in exprs.iter() { - // match expr { - // ExprKind::Define(d) => { - // d.name.atom_identifier().map(|x| renamer.scope.define(*x)); - // } - // ExprKind::Begin(b) => { - // for expr in &b.exprs { - // if let ExprKind::Define(d) = expr { - // d.name.atom_identifier().map(|x| renamer.scope.define(*x)); - // } - // } - // } - // _ => {} - // } - // } - for expr in exprs.iter_mut() { renamer.visit(expr); } @@ -71,7 +55,7 @@ impl VisitorMutRefUnit for RenameShadowedVariables { self.shadows.define(*variable, modifier); // Create a mutable string to mangle - let mut mut_var = variable.resolve().to_string(); + let mut mut_var = "##".to_string() + variable.resolve(); if let Some(char_modifier) = char::from_digit(modifier as u32, 10) { mut_var.push(char_modifier); @@ -82,6 +66,8 @@ impl VisitorMutRefUnit for RenameShadowedVariables { mut_var.push_str(self.str_modifiers.get(&modifier).unwrap()); } + // println!("Mangling variable: {}", mut_var); + *variable = mut_var.into(); self.scope.define(*variable); @@ -98,6 +84,8 @@ impl VisitorMutRefUnit for RenameShadowedVariables { self.shadows.pop_layer(); } + fn visit_quote(&mut self, _quote: &mut steel_parser::ast::Quote) {} + fn visit_atom(&mut self, a: &mut Atom) { if let Some(ident) = a.ident_mut() { if let Some(modifier) = self.shadows.get(ident) { @@ -105,7 +93,7 @@ impl VisitorMutRefUnit for RenameShadowedVariables { // Now, shadowing shouldn't actually _be_ a problem // ident.push(char::from_digit(*modifier as u32, 10).unwrap()); - let mut mut_ident = ident.resolve().to_string(); + let mut mut_ident = "##".to_string() + ident.resolve(); 
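+            // e.g. an identifier `x` shadowed with modifier 2 is rewritten to `##x2` once the
+            // digit is appended below.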
if let Some(char_modifier) = char::from_digit(*modifier as u32, 10) { mut_ident.push(char_modifier) @@ -135,7 +123,7 @@ impl VisitorMutRefUnit for RenameShadowedVariables { let modifier = self.scope.depth(); self.shadows.define(*variable, modifier); - let mut mut_var = variable.resolve().to_string(); + let mut mut_var = "##".to_string() + variable.resolve(); if let Some(char_modifier) = char::from_digit(modifier as u32, 10) { mut_var.push(char_modifier); diff --git a/crates/steel-core/src/compiler/program.rs b/crates/steel-core/src/compiler/program.rs index 96f217ad3..5edeb82c0 100644 --- a/crates/steel-core/src/compiler/program.rs +++ b/crates/steel-core/src/compiler/program.rs @@ -1,3 +1,5 @@ +use crate::core::labels::Expr; +use crate::parser::span_visitor::get_span; use crate::rvals::Result; use crate::{ compiler::constants::ConstantMap, @@ -106,15 +108,15 @@ pub fn specialize_constants(instructions: &mut [Instruction]) -> Result<()> { Some(Instruction { op_code: OpCode::PUSHCONST, contents: - Some(SyntaxObject { + Some(Expr::Atom(SyntaxObject { ty: TokenType::Identifier(_), .. - }), + })), .. }) => continue, Some(Instruction { op_code: OpCode::PUSHCONST, - contents: Some(syn), + contents: Some(Expr::Atom(syn)), .. }) => { let value = eval_atom(syn)?; @@ -149,7 +151,7 @@ pub fn convert_call_globals(instructions: &mut [Instruction]) { Some(Instruction { op_code: OpCode::PUSH, payload_size: index, - contents: Some(ident), + contents: Some(Expr::Atom(ident)), .. }), Some(Instruction { @@ -237,7 +239,7 @@ pub fn convert_call_globals(instructions: &mut [Instruction]) { Some(Instruction { op_code: OpCode::PUSH, payload_size: index, - contents: Some(ident), + contents: Some(Expr::Atom(ident)), .. }), Some(Instruction { @@ -405,10 +407,10 @@ pub fn inline_num_operations(instructions: &mut [Instruction]) { Some(Instruction { op_code: OpCode::FUNC | OpCode::TAILCALL, contents: - Some(RawSyntaxObject { + Some(Expr::Atom(RawSyntaxObject { ty: TokenType::Identifier(ident), .. - }), + })), payload_size, .. 
}), @@ -645,7 +647,7 @@ impl Program { // This way, the VM knows where to look up values #[derive(Clone)] pub struct RawProgramWithSymbols { - instructions: Vec>, + pub(crate) instructions: Vec>, pub(crate) constant_map: ConstantMap, version: String, // TODO -> this should be semver } @@ -1041,7 +1043,15 @@ fn extract_spans( .iter() .map(|x| { x.iter() - .map(|x| x.contents.as_ref().map(|x| x.span).unwrap_or_default()) + .map(|x| { + x.contents + .as_ref() + .map(|x| match x { + Expr::Atom(a) => a.span, + Expr::List(l) => get_span(l), + }) + .unwrap_or_default() + }) .collect() }) .collect(); diff --git a/crates/steel-core/src/core/instructions.rs b/crates/steel-core/src/core/instructions.rs index 3a991a1f3..159e39f14 100644 --- a/crates/steel-core/src/core/instructions.rs +++ b/crates/steel-core/src/core/instructions.rs @@ -1,8 +1,9 @@ use crate::core::opcode::OpCode; -use crate::parser::parser::SyntaxObject; use serde::{Deserialize, Serialize}; use std::convert::TryInto; +use super::labels::Expr; + /// Instruction loaded with lots of information prior to being condensed /// Includes the opcode and the payload size, plus some information /// used for locating spans and pretty error messages @@ -10,21 +11,19 @@ use std::convert::TryInto; pub struct Instruction { pub op_code: OpCode, pub payload_size: usize, - pub contents: Option, - pub constant: bool, + pub contents: Option, } impl Instruction { pub fn new_from_parts( op_code: OpCode, payload_size: usize, - contents: Option, + contents: Option, ) -> Instruction { Instruction { op_code, payload_size, contents, - constant: false, } } } @@ -96,8 +95,16 @@ pub fn disassemble(instructions: &[Instruction]) -> String { buffer.push_str(" "); if let Some(syn) = instruction.contents.as_ref() { - let contents = syn.ty.to_string(); - buffer.push_str(contents.as_str()); + match syn { + Expr::Atom(syn) => { + let contents = syn.ty.to_string(); + buffer.push_str(contents.as_str()); + } + Expr::List(l) => { + let contents = l.to_string(); + buffer.push_str(contents.as_str()); + } + } } buffer.push('\n'); diff --git a/crates/steel-core/src/core/labels.rs b/crates/steel-core/src/core/labels.rs index 77c3f85c8..ff2b97da2 100644 --- a/crates/steel-core/src/core/labels.rs +++ b/crates/steel-core/src/core/labels.rs @@ -1,11 +1,11 @@ +use serde::{Deserialize, Serialize}; +use steel_parser::ast::ExprKind; + use super::instructions::Instruction; use super::opcode::OpCode; use crate::parser::parser::SyntaxObject; -use std::{ - collections::HashMap, - sync::atomic::{AtomicUsize, Ordering}, -}; +use std::sync::atomic::{AtomicUsize, Ordering}; pub(crate) static LABEL_ID: AtomicUsize = AtomicUsize::new(0); @@ -16,11 +16,17 @@ pub fn fresh() -> Label { Label(LABEL_ID.fetch_add(1, Ordering::Relaxed)) } +#[derive(Clone, Serialize, Deserialize, Debug, PartialEq)] +pub enum Expr { + Atom(SyntaxObject), + List(ExprKind), +} + #[derive(Clone, Debug)] pub struct LabeledInstruction { pub op_code: OpCode, pub payload_size: usize, - pub contents: Option, + pub contents: Option, pub tag: Option