diff --git a/Cargo.lock b/Cargo.lock index 91cc0e4..7803fd7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -331,6 +331,7 @@ dependencies = [ name = "cainome-parser" version = "0.1.0" dependencies = [ + "convert_case", "quote", "serde_json", "starknet", diff --git a/Cargo.toml b/Cargo.toml index a989fb3..8bdd169 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -19,6 +19,7 @@ cainome-rs = { path = "crates/rs" } anyhow = "1.0" async-trait = "0.1.68" +convert_case = "0.6.0" serde = { version = "1.0", default-features = false, features = ["alloc"] } serde_json = { version = "1.0", default-features = false, features = ["std"] } starknet = "0.8.0" @@ -38,7 +39,7 @@ anyhow.workspace = true camino = { version = "1.1.2", features = [ "serde1" ] } clap = { version = "4.2", features = [ "derive" ] } clap_complete = "4.3" -convert_case = "0.6.0" +convert_case.workspace = true serde.workspace = true serde_json.workspace = true starknet.workspace = true diff --git a/contracts/cairo0/kkrt.abi.json b/contracts/cairo0/kkrt.abi.json new file mode 100644 index 0000000..73e9e45 --- /dev/null +++ b/contracts/cairo0/kkrt.abi.json @@ -0,0 +1,275 @@ +[ + { + "data": [ + { + "name": "evm_contract_address", + "type": "felt" + }, + { + "name": "starknet_contract_address", + "type": "felt" + } + ], + "keys": [], + "name": "evm_contract_deployed", + "type": "event" + }, + { + "data": [ + { + "name": "previousOwner", + "type": "felt" + }, + { + "name": "newOwner", + "type": "felt" + } + ], + "keys": [], + "name": "OwnershipTransferred", + "type": "event" + }, + { + "inputs": [ + { + "name": "owner", + "type": "felt" + }, + { + "name": "native_token_address_", + "type": "felt" + }, + { + "name": "contract_account_class_hash_", + "type": "felt" + }, + { + "name": "externally_owned_account_class_hash", + "type": "felt" + }, + { + "name": "account_proxy_class_hash", + "type": "felt" + }, + { + "name": "deploy_fee", + "type": "felt" + } + ], + "name": "constructor", + "outputs": [], + "type": "constructor" + }, + { + "inputs": [ + { + "name": "blockhash_registry_address_", + "type": "felt" + } + ], + "name": "set_blockhash_registry", + "outputs": [], + "type": "function" + }, + { + "inputs": [], + "name": "get_blockhash_registry", + "outputs": [ + { + "name": "address", + "type": "felt" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "name": "native_token_address_", + "type": "felt" + } + ], + "name": "set_native_token", + "outputs": [], + "type": "function" + }, + { + "inputs": [], + "name": "get_native_token", + "outputs": [ + { + "name": "native_token_address", + "type": "felt" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "name": "deploy_fee_", + "type": "felt" + } + ], + "name": "set_deploy_fee", + "outputs": [], + "type": "function" + }, + { + "inputs": [], + "name": "get_deploy_fee", + "outputs": [ + { + "name": "deploy_fee", + "type": "felt" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "name": "evm_address", + "type": "felt" + } + ], + "name": "compute_starknet_address", + "outputs": [ + { + "name": "contract_address", + "type": "felt" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "name": "evm_address", + "type": "felt" + } + ], + "name": "get_starknet_address", + "outputs": [ + { + "name": "starknet_address", + "type": "felt" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "name": "evm_address", + "type": "felt" + } + ], 
+ "name": "deploy_externally_owned_account", + "outputs": [ + { + "name": "starknet_contract_address", + "type": "felt" + } + ], + "type": "function" + }, + { + "inputs": [ + { + "name": "origin", + "type": "felt" + }, + { + "name": "to", + "type": "felt" + }, + { + "name": "gas_limit", + "type": "felt" + }, + { + "name": "gas_price", + "type": "felt" + }, + { + "name": "value", + "type": "felt" + }, + { + "name": "data_len", + "type": "felt" + }, + { + "name": "data", + "type": "felt*" + } + ], + "name": "eth_call", + "outputs": [ + { + "name": "return_data_len", + "type": "felt" + }, + { + "name": "return_data", + "type": "felt*" + }, + { + "name": "success", + "type": "felt" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "name": "to", + "type": "felt" + }, + { + "name": "gas_limit", + "type": "felt" + }, + { + "name": "gas_price", + "type": "felt" + }, + { + "name": "value", + "type": "felt" + }, + { + "name": "data_len", + "type": "felt" + }, + { + "name": "data", + "type": "felt*" + } + ], + "name": "eth_send_transaction", + "outputs": [ + { + "name": "return_data_len", + "type": "felt" + }, + { + "name": "return_data", + "type": "felt*" + }, + { + "name": "success", + "type": "felt" + } + ], + "type": "function" + } +] diff --git a/contracts/cairo0/oz0.abi.json b/contracts/cairo0/oz0.abi.json new file mode 100644 index 0000000..5c6dd55 --- /dev/null +++ b/contracts/cairo0/oz0.abi.json @@ -0,0 +1,190 @@ +[ + { + "members": [ + { + "name": "to", + "offset": 0, + "type": "felt" + }, + { + "name": "selector", + "offset": 1, + "type": "felt" + }, + { + "name": "data_offset", + "offset": 2, + "type": "felt" + }, + { + "name": "data_len", + "offset": 3, + "type": "felt" + } + ], + "name": "AccountCallArray", + "size": 4, + "type": "struct" + }, + { + "inputs": [ + { + "name": "publicKey", + "type": "felt" + } + ], + "name": "constructor", + "outputs": [], + "type": "constructor" + }, + { + "inputs": [], + "name": "getPublicKey", + "outputs": [ + { + "name": "publicKey", + "type": "felt" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "name": "interfaceId", + "type": "felt" + } + ], + "name": "supportsInterface", + "outputs": [ + { + "name": "success", + "type": "felt" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "name": "newPublicKey", + "type": "felt" + } + ], + "name": "setPublicKey", + "outputs": [], + "type": "function" + }, + { + "inputs": [ + { + "name": "hash", + "type": "felt" + }, + { + "name": "signature_len", + "type": "felt" + }, + { + "name": "signature", + "type": "felt*" + } + ], + "name": "isValidSignature", + "outputs": [ + { + "name": "isValid", + "type": "felt" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "name": "call_array_len", + "type": "felt" + }, + { + "name": "call_array", + "type": "AccountCallArray*" + }, + { + "name": "calldata_len", + "type": "felt" + }, + { + "name": "calldata", + "type": "felt*" + } + ], + "name": "__validate__", + "outputs": [], + "type": "function" + }, + { + "inputs": [ + { + "name": "class_hash", + "type": "felt" + } + ], + "name": "__validate_declare__", + "outputs": [], + "type": "function" + }, + { + "inputs": [ + { + "name": "class_hash", + "type": "felt" + }, + { + "name": "salt", + "type": "felt" + }, + { + "name": "publicKey", + "type": "felt" + } + ], + "name": "__validate_deploy__", + "outputs": [], + "type": "function" + }, + { + "inputs": [ + { + "name": 
"call_array_len", + "type": "felt" + }, + { + "name": "call_array", + "type": "AccountCallArray*" + }, + { + "name": "calldata_len", + "type": "felt" + }, + { + "name": "calldata", + "type": "felt*" + } + ], + "name": "__execute__", + "outputs": [ + { + "name": "response_len", + "type": "felt" + }, + { + "name": "response", + "type": "felt*" + } + ], + "type": "function" + } +] diff --git a/crates/cairo-serde/src/types/array_legacy.rs b/crates/cairo-serde/src/types/array_legacy.rs index a28c20d..62ac335 100644 --- a/crates/cairo-serde/src/types/array_legacy.rs +++ b/crates/cairo-serde/src/types/array_legacy.rs @@ -60,7 +60,7 @@ where let mut offset = offset; let len = felts[offset - 1]; - if FieldElement::from(offset) + len >= FieldElement::from(felts.len()) { + if FieldElement::from(offset) + len > FieldElement::from(felts.len()) { return Err(Error::Deserialize(format!( "Buffer too short to deserialize an array of length {}: offset ({}) : buffer {:?}", len, offset, felts, @@ -80,3 +80,20 @@ where Ok(CairoArrayLegacy(out)) } } + +#[cfg(test)] +mod tests { + use super::*; + use starknet::macros::felt; + + #[test] + fn array_offset_len_ok() { + let serialized = vec![felt!("4"), felt!("1"), felt!("2"), felt!("3"), felt!("4")]; + let a = CairoArrayLegacy::::cairo_deserialize(&serialized, 1).unwrap(); + assert_eq!(a.len(), 4); + assert_eq!(a.0[0], felt!("1")); + assert_eq!(a.0[1], felt!("2")); + assert_eq!(a.0[2], felt!("3")); + assert_eq!(a.0[3], felt!("4")); + } +} diff --git a/crates/parser/Cargo.toml b/crates/parser/Cargo.toml index 5e5c6c2..95ef30c 100644 --- a/crates/parser/Cargo.toml +++ b/crates/parser/Cargo.toml @@ -4,6 +4,7 @@ version = "0.1.0" edition = "2021" [dependencies] +convert_case.workspace = true starknet.workspace = true thiserror.workspace = true syn = { version = "2.0", features = [ "extra-traits" ]} diff --git a/crates/parser/src/abi/conversions.rs b/crates/parser/src/abi/conversions.rs index 1416d28..4dbdb03 100644 --- a/crates/parser/src/abi/conversions.rs +++ b/crates/parser/src/abi/conversions.rs @@ -1,4 +1,5 @@ use starknet::core::types::contract::{ + legacy::{RawLegacyEvent, RawLegacyStruct}, AbiEnum, AbiEventEnum, AbiEventStruct, AbiStruct, EventFieldKind, StateMutability as StarknetStateMutability, }; @@ -175,3 +176,75 @@ impl TryFrom<&AbiEventEnum> for Token { } } } + +impl TryFrom<&RawLegacyStruct> for Token { + type Error = Error; + + fn try_from(value: &RawLegacyStruct) -> Result { + let mut t = Token::parse(&value.name)?; + + if let Token::Composite(ref mut c) = t { + c.r#type = CompositeType::Struct; + + for (i, m) in value.members.iter().enumerate() { + c.inners.push(CompositeInner { + index: i, + name: m.name.clone(), + token: Token::parse(&m.r#type).unwrap(), + kind: CompositeInnerKind::NotUsed, + }); + } + + Ok(t) + } else { + Err(Error::ParsingFailed(format!( + "RawLegacyStruct is expected to be a Composite token, got `{:?}`", + value, + ))) + } + } +} + +impl TryFrom<&RawLegacyEvent> for Token { + type Error = Error; + + fn try_from(value: &RawLegacyEvent) -> Result { + let mut t = Token::parse(&value.name)?; + + if let Token::Composite(ref mut c) = t { + c.r#type = CompositeType::Struct; + c.is_event = true; + + let mut i = 0; + + for m in value.data.iter() { + c.inners.push(CompositeInner { + index: i, + name: m.name.clone(), + token: Token::parse(&m.r#type).unwrap(), + kind: CompositeInnerKind::Data, + }); + + i += 1; + } + + for m in value.keys.iter() { + c.inners.push(CompositeInner { + index: i, + name: m.name.clone(), + token: 
Token::parse(&m.r#type).unwrap(), + kind: CompositeInnerKind::Key, + }); + + i += 1; + } + + Ok(t) + } else { + Err(Error::ParsingFailed(format!( + "RawLegacyEvent is expected to be a Composite token, got `{:?}`", + value, + ))) + } + } +}
diff --git a/crates/parser/src/abi/mod.rs b/crates/parser/src/abi/mod.rs index 0748000..4270db7 100644 --- a/crates/parser/src/abi/mod.rs +++ b/crates/parser/src/abi/mod.rs @@ -1,4 +1,5 @@ pub mod parser; +pub mod parser_legacy; mod conversions;
diff --git a/crates/parser/src/abi/parser_legacy.rs b/crates/parser/src/abi/parser_legacy.rs new file mode 100644 index 0000000..920f19c --- /dev/null +++ b/crates/parser/src/abi/parser_legacy.rs @@ -0,0 +1,191 @@ +use starknet::core::types::contract::legacy::{ + RawLegacyAbiEntry, RawLegacyMember, RawLegacyStruct, +}; +use starknet::core::types::contract::StateMutability; +use std::collections::HashMap; + +use crate::tokens::{Composite, CompositeType, CoreBasic, Function, Token}; +use crate::{CainomeResult, Error, TokenizedAbi}; + +pub struct AbiParserLegacy {} + +impl AbiParserLegacy { + /// Generates the [`Token`]s from the given ABI string. + /// + /// # Arguments + /// + /// * `abi` - A string representing the ABI (a JSON array of `RawLegacyAbiEntry`). + /// * `type_aliases` - Types to be renamed to avoid name clashing of generated types. + pub fn tokens_from_abi_string( + abi: &str, + type_aliases: &HashMap<String, String>, + ) -> CainomeResult<TokenizedAbi> { + let abi_entries = Self::parse_abi_string(abi)?; + let tokenized_abi = + Self::collect_tokens(&abi_entries, type_aliases).expect("failed tokens parsing"); + + Ok(tokenized_abi) + } + + /// Parses an ABI string to output a `Vec<RawLegacyAbiEntry>`. + /// + /// # Arguments + /// + /// * `abi` - A string representing the ABI (a JSON array of `RawLegacyAbiEntry`). + pub fn parse_abi_string(abi: &str) -> CainomeResult<Vec<RawLegacyAbiEntry>> { + let entries = + serde_json::from_str::<Vec<RawLegacyAbiEntry>>(abi).map_err(Error::SerdeJson)?; + Ok(entries) + } + + /// Parses all tokens in the ABI. + pub fn collect_tokens( + entries: &[RawLegacyAbiEntry], + type_aliases: &HashMap<String, String>, + ) -> CainomeResult<TokenizedAbi> { + let mut tokens: HashMap<String, Token> = HashMap::new(); + + for entry in entries { + Self::collect_entry_token(entry, &mut tokens)?; + } + + let mut structs = vec![]; + let mut enums = vec![]; + // This is not memory efficient, but + // currently the focus is on search speed. + // To be optimized. + let mut all_composites: HashMap<String, Composite> = HashMap::new(); + + // Apply type aliases only on structs and enums. + for (_, mut t) in tokens { + for (type_path, alias) in type_aliases { + t.apply_alias(type_path, alias); + } + + if let Token::Composite(ref c) = t { + all_composites.insert(c.type_path_no_generic(), c.clone()); + + match c.r#type { + CompositeType::Struct => structs.push(t), + CompositeType::Enum => enums.push(t), + _ => (), + } + } + } + + let mut functions = vec![]; + + for entry in entries { + Self::collect_entry_function(entry, &mut all_composites, &mut structs, &mut functions)?; + } + + let interfaces: HashMap<String, Vec<Token>> = HashMap::new(); + + Ok(TokenizedAbi { + enums, + structs, + functions, + interfaces, + }) + } + + /// Collects the token for a struct or event ABI entry, if any. + fn collect_entry_token( + entry: &RawLegacyAbiEntry, + tokens: &mut HashMap<String, Token>, + ) -> CainomeResult<()> { + match entry { + RawLegacyAbiEntry::Struct(s) => { + // Some structs may be basic types; we want to skip them.
+ if CoreBasic::parse(&s.name).is_ok() { + return Ok(()); + }; + + let token: Token = s.try_into()?; + tokens.insert(token.type_path(), token); + } + RawLegacyAbiEntry::Event(ev) => { + let token: Token = ev.try_into()?; + tokens.insert(token.type_path(), token); + } + _ => (), + }; + + Ok(()) + } + + /// Collects the function token for a function ABI entry, if any. + fn collect_entry_function( + entry: &RawLegacyAbiEntry, + all_composites: &mut HashMap<String, Composite>, + structs: &mut Vec<Token>, + functions: &mut Vec<Token>, + ) -> CainomeResult<()> { + /// Gets the existing token from the known composites, if any. + /// Otherwise, returns the freshly parsed token. + fn get_existing_token_or_parsed( + type_path: &str, + all_composites: &HashMap<String, Composite>, + ) -> CainomeResult<Token> { + let parsed_token = Token::parse(type_path)?; + + // If the token is a known struct or enum, we look it up among the existing + // composites to get the full definition, as composites are parsed before functions. + if let Token::Composite(ref c) = parsed_token { + match all_composites.get(&c.type_path_no_generic()) { + Some(e) => Ok(Token::Composite(e.clone())), + None => Ok(parsed_token), + } + } else { + Ok(parsed_token) + } + } + + // TODO: optimize the search and data structures. + // HashMap would be more appropriate than vec. + if let RawLegacyAbiEntry::Function(f) = entry { + // In Cairo 0 ABIs, if no state mutability is given, the function is an external. + let mutability = match f.state_mutability { + Some(_) => StateMutability::View, + None => StateMutability::External, + }; + + let mut func = Function::new(&f.name, mutability.into()); + + for i in &f.inputs { + let token = get_existing_token_or_parsed(&i.r#type, all_composites)?; + func.inputs.push((i.name.clone(), token)); + } + + for o in &f.outputs { + let token = get_existing_token_or_parsed(&o.r#type, all_composites)?; + func.named_outputs.push((o.name.clone(), token)); + } + + if !func.named_outputs.is_empty() { + let mut members = vec![]; + + for (offset, (n, t)) in func.named_outputs.iter().enumerate() { + members.push(RawLegacyMember { + name: n.clone(), + offset: offset.try_into().unwrap(), + r#type: t.type_path().clone(), + }); + } + + let s = RawLegacyStruct { + members, + name: func.get_cairo0_output_name(), + size: func.named_outputs.len() as u64, + }; + + structs.push((&s).try_into()?); + } + + functions.push(Token::Function(func)); + } + + Ok(()) + } +}
diff --git a/crates/parser/src/lib.rs b/crates/parser/src/lib.rs index 1883167..6beda87 100644 --- a/crates/parser/src/lib.rs +++ b/crates/parser/src/lib.rs @@ -3,5 +3,6 @@ pub use error::{CainomeResult, Error}; mod abi; pub use crate::abi::parser::{AbiParser, TokenizedAbi}; +pub use crate::abi::parser_legacy::AbiParserLegacy; pub mod tokens;
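For reference, a minimal sketch (not part of this diff) of how `AbiParserLegacy::tokens_from_abi_string` could be exercised on a tiny Cairo 0 ABI; the `get_value` entry and the assertions are hypothetical:

```rust
use std::collections::HashMap;

use cainome_parser::AbiParserLegacy;

#[test]
fn tokenizes_a_minimal_cairo0_abi() {
    // Hypothetical single-function Cairo 0 ABI with one named output.
    let abi = r#"[
      {
        "inputs": [],
        "name": "get_value",
        "outputs": [{ "name": "value", "type": "felt" }],
        "stateMutability": "view",
        "type": "function"
      }
    ]"#;

    let tokens = AbiParserLegacy::tokens_from_abi_string(abi, &HashMap::new()).unwrap();

    // One function, plus the synthetic `GetValueOutput` struct built from the named outputs.
    assert_eq!(tokens.functions.len(), 1);
    assert_eq!(tokens.structs.len(), 1);
}
```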
diff --git a/crates/parser/src/tokens/array.rs b/crates/parser/src/tokens/array.rs index 416ed05..23e093b 100644 --- a/crates/parser/src/tokens/array.rs +++ b/crates/parser/src/tokens/array.rs @@ -1,12 +1,16 @@ use super::constants::CAIRO_CORE_SPAN_ARRAY; use super::genericity; -use super::Token; + +use crate::tokens::Token; use crate::{CainomeResult, Error}; +pub const CAIRO_0_ARRAY: &str = "*"; + #[derive(Debug, Clone, PartialEq)] pub struct Array { pub type_path: String, pub inner: Box<Token>, + pub is_legacy: bool, } impl Array { @@ -28,10 +32,19 @@ impl Array { return Ok(Self { type_path: type_path.to_string(), inner: Box::new(generic_arg_token.clone()), + is_legacy: false, }); } } + if let Some(inner_type) = type_path.strip_suffix(CAIRO_0_ARRAY) { + return Ok(Self { + type_path: type_path.to_string(), + inner: Box::new(Token::parse(inner_type)?), + is_legacy: true, + }); + } + Err(Error::TokenInitFailed(format!( "Array/Span couldn't be initialized from `{}`.", type_path, @@ -45,6 +58,7 @@ Token::Array(Self { type_path: self.type_path.clone(), inner: Box::new(self.inner.resolve_generic(generic_name, generic_type_path)), + is_legacy: self.is_legacy, }) } } @@ -68,6 +82,7 @@ mod tests { inner: Box::new(Token::CoreBasic(CoreBasic { type_path: "core::felt252".to_string() })), + is_legacy: false, } ); }
diff --git a/crates/parser/src/tokens/composite.rs b/crates/parser/src/tokens/composite.rs index fd778b3..a1b277f 100644 --- a/crates/parser/src/tokens/composite.rs +++ b/crates/parser/src/tokens/composite.rs @@ -195,6 +195,7 @@ mod tests { Token::Array(Array { type_path: "core::array::Array::<core::felt252>".to_string(), inner: Box::new(basic_felt252()), + is_legacy: false, }) } @@ -367,6 +368,7 @@ Token::Array(Array { type_path: "core::array::Array::<A>".to_string(), inner: Box::new(Token::GenericArg("A".to_string())), + is_legacy: false, }), ); }
diff --git a/crates/parser/src/tokens/constants.rs b/crates/parser/src/tokens/constants.rs index ab26180..6e26580 100644 --- a/crates/parser/src/tokens/constants.rs +++ b/crates/parser/src/tokens/constants.rs @@ -1,4 +1,5 @@ -pub const CAIRO_CORE_BASIC: [&str; 16] = [ +pub const CAIRO_CORE_BASIC: [&str; 17] = [ + "felt", "core::felt252", "core::bool", "core::integer::u8",
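The `*` suffix handling above is what lets Cairo 0 pointer types such as `felt*` flow through the tokenizer. A small illustrative check (hypothetical, not part of this diff) could look like:

```rust
use cainome_parser::tokens::{Array, CoreBasic, Token};

#[test]
fn felt_pointer_parses_as_legacy_array() {
    // `felt*` is a Cairo 0 pointer type; it should become a legacy array token.
    match Token::parse("felt*").unwrap() {
        Token::Array(Array { inner, is_legacy, .. }) => {
            assert!(is_legacy);
            assert_eq!(
                *inner,
                Token::CoreBasic(CoreBasic { type_path: "felt".to_string() })
            );
        }
        other => panic!("expected a legacy array token, got {:?}", other),
    }
}
```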
diff --git a/crates/parser/src/tokens/function.rs b/crates/parser/src/tokens/function.rs index 5b0ee70..1365115 100644 --- a/crates/parser/src/tokens/function.rs +++ b/crates/parser/src/tokens/function.rs @@ -1,3 +1,5 @@ +use convert_case::{Case, Casing}; + use super::Token; #[derive(Debug, Clone, PartialEq)] @@ -6,12 +8,21 @@ pub enum StateMutability { View, } +#[derive(Debug)] +pub enum FunctionOutputKind { + NoOutput, + Cairo1, + Cairo0, +} + #[derive(Debug, Clone, PartialEq)] pub struct Function { pub name: String, pub state_mutability: StateMutability, pub inputs: Vec<(String, Token)>, pub outputs: Vec<Token>, + // Only Cairo 0 has named outputs. + pub named_outputs: Vec<(String, Token)>, } impl Function { @@ -21,6 +32,7 @@ impl Function { state_mutability, inputs: vec![], outputs: vec![], + named_outputs: vec![], } } @@ -37,4 +49,20 @@ impl Function { } } } + + pub fn get_output_kind(&self) -> FunctionOutputKind { + match (self.outputs.is_empty(), self.named_outputs.is_empty()) { + (true, true) => FunctionOutputKind::NoOutput, + (false, true) => FunctionOutputKind::Cairo1, + (true, false) => FunctionOutputKind::Cairo0, + (false, false) => panic!("Function's outputs and named outputs are exclusive!"), + } + } + + pub fn get_cairo0_output_name(&self) -> String { + format!( + "{}Output", + self.name.from_case(Case::Snake).to_case(Case::Pascal) + ) + } }
diff --git a/crates/parser/src/tokens/mod.rs b/crates/parser/src/tokens/mod.rs index cc38cfb..ddb8ee3 100644 --- a/crates/parser/src/tokens/mod.rs +++ b/crates/parser/src/tokens/mod.rs @@ -13,7 +13,7 @@ mod tuple; pub use array::Array; pub use basic::CoreBasic; pub use composite::{Composite, CompositeInner, CompositeInnerKind, CompositeType}; -pub use function::{Function, StateMutability}; +pub use function::{Function, FunctionOutputKind, StateMutability}; pub use tuple::Tuple; use crate::{CainomeResult, Error};
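To make the new naming rule concrete, here is a hypothetical check (not part of this diff) of `get_output_kind` and `get_cairo0_output_name` on a Cairo 0 style function token:

```rust
use cainome_parser::tokens::{Function, FunctionOutputKind, StateMutability, Token};

#[test]
fn cairo0_named_outputs_drive_the_generated_struct_name() {
    let mut func = Function::new("get_native_token", StateMutability::View);
    func.named_outputs
        .push(("native_token_address".to_string(), Token::parse("felt").unwrap()));

    // Named outputs (and no Cairo 1 outputs) mean the Cairo 0 path is taken.
    assert!(matches!(func.get_output_kind(), FunctionOutputKind::Cairo0));
    // Snake case function name -> PascalCase + `Output` suffix.
    assert_eq!(func.get_cairo0_output_name(), "GetNativeTokenOutput");
}
```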
diff --git a/crates/rs-macro/README.md b/crates/rs-macro/README.md index 11ce8bd..488715b 100644 --- a/crates/rs-macro/README.md +++ b/crates/rs-macro/README.md @@ -17,6 +17,16 @@ use cainome::rs::abigen; abigen!(MyContract, "/path/my_contract.json"); ``` +Cairo 0 support is limited (events are not parsed yet), but to interact with a Cairo 0 +program you can use the legacy macro: + +```rust +// Rust code +use cainome::rs::abigen_legacy; + +abigen_legacy!(MyContract, "/path/cairo_0.json"); +``` + ## Usage For examples, please refer to the [examples](../../examples) folder. @@ -26,7 +36,7 @@ The `abigen!` macro takes 2 or 3 inputs: 1. The name you want to assign to the contract type being generated. 2. Path to the JSON file containing the ABI. This file can have two format: - - The entire Sierra file (`*.contract_class.json`) + - The entire Sierra file (`*.contract_class.json`) [**Only for Cairo 1**] - Only the array of ABI entries. These can be easily extracted with `jq` doing the following: ``` @@ -160,6 +170,31 @@ The expansion of the macros generates the following: }; ``` +- For Cairo 0 contracts, each function with at least one output gets a generated `struct` gathering the output fields: + + ```json + { + "inputs": [], + "name": "get_blockhash_registry", + "outputs": [ + { + "name": "address", + "type": "felt" + } + ], + "stateMutability": "view", + "type": "function" + } + ``` + + Will generate a struct named after the function in PascalCase, with the suffix `Output`: + + ```rust + pub struct GetBlockhashRegistryOutput { + pub address: starknet::core::types::FieldElement, + } + ``` + ## Known limitation With the current state of the parser, here are some limitations:
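As a usage sketch (not part of this diff), reading such an output struct inside an async context might look like the following; `MyContractReader`, the `.call()` pattern, `contract_address`, and `provider` are assumptions carried over from the Cairo 1 flow described earlier in this README:

```rust
// Hypothetical call site: `contract_address` and `provider` are assumed in scope,
// and the reader/`.call()` pattern mirrors the Cairo 1 bindings.
let kkrt = MyContractReader::new(contract_address, &provider);

let GetBlockhashRegistryOutput { address } = kkrt
    .get_blockhash_registry()
    .call()
    .await
    .expect("call to get_blockhash_registry failed");

println!("blockhash registry: {:#x}", address);
```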
diff --git a/crates/rs-macro/src/lib.rs b/crates/rs-macro/src/lib.rs index d58c8dc..1bc7561 100644 --- a/crates/rs-macro/src/lib.rs +++ b/crates/rs-macro/src/lib.rs @@ -1,23 +1,30 @@ -use cainome_parser::AbiParser; +use cainome_parser::{AbiParser, AbiParserLegacy}; use cainome_rs::{self}; use proc_macro::TokenStream; use quote::quote; mod macro_inputs; +mod macro_inputs_legacy; mod spanned; use crate::macro_inputs::ContractAbi; +use crate::macro_inputs_legacy::ContractAbiLegacy; #[proc_macro] pub fn abigen(input: TokenStream) -> TokenStream { abigen_internal(input) } +#[proc_macro] +pub fn abigen_legacy(input: TokenStream) -> TokenStream { + abigen_internal_legacy(input) +} fn abigen_internal(input: TokenStream) -> TokenStream { let contract_abi = syn::parse_macro_input!(input as ContractAbi); - let contract_name = contract_abi.name; let abi_entries = contract_abi.abi; + let contract_name = contract_abi.name; let abi_tokens = AbiParser::collect_tokens(&abi_entries, &contract_abi.type_aliases) .expect("failed tokens parsing"); @@ -36,3 +43,27 @@ fn abigen_internal(input: TokenStream) -> TokenStream { expanded.into() } } + +fn abigen_internal_legacy(input: TokenStream) -> TokenStream { + let contract_abi = syn::parse_macro_input!(input as ContractAbiLegacy); + + let abi_entries = contract_abi.abi; + let contract_name = contract_abi.name; + + let abi_tokens = AbiParserLegacy::collect_tokens(&abi_entries, &contract_abi.type_aliases) + .expect("failed tokens parsing"); + + let expanded = cainome_rs::abi_to_tokenstream(&contract_name.to_string(), &abi_tokens); + + if let Some(out_path) = contract_abi.output_path { + let content: String = expanded.to_string(); + match std::fs::write(out_path, content) { + Ok(_) => (), + Err(e) => panic!("Failed to write to file: {}", e), + } + + quote!().into() + } else { + expanded.into() + } +}
diff --git a/crates/rs-macro/src/macro_inputs_legacy.rs b/crates/rs-macro/src/macro_inputs_legacy.rs new file mode 100644 index 0000000..728dded --- /dev/null +++ b/crates/rs-macro/src/macro_inputs_legacy.rs @@ -0,0 +1,160 @@ +//! Defines the arguments of the `abigen_legacy` macro. +//! +//! `ContractAbiLegacy` is expected to be the argument +//! passed to the macro. We should then parse the +//! token stream to ensure the arguments are correct. +//! +//! At this moment, the macro supports one form: +//! +//! Loading from a file with only the ABI array: +//! abigen_legacy!(ContractName, "path/to/abi.json"); +//! +//! TODO: support the full artifact JSON to be able to +//! deploy contracts from abigen. +use quote::ToTokens; +use starknet::core::types::contract::legacy::RawLegacyAbiEntry; +use std::collections::{HashMap, HashSet}; +use std::fs::File; +use std::path::Path; +use syn::{ + braced, + ext::IdentExt, + parenthesized, + parse::{Parse, ParseStream, Result}, + Ident, LitStr, Token, Type, +}; + +use crate::spanned::Spanned; + +const CARGO_MANIFEST_DIR: &str = "$CARGO_MANIFEST_DIR/"; + +#[derive(Clone, Debug)] +pub(crate) struct ContractAbiLegacy { + pub name: Ident, + pub abi: Vec<RawLegacyAbiEntry>, + pub output_path: Option<String>, + pub type_aliases: HashMap<String, String>, +} + +impl Parse for ContractAbiLegacy { + fn parse(input: ParseStream) -> Result<Self> { + let name = input.parse::<Ident>()?; + input.parse::<Token![,]>()?; + + // ABI path or content. + + // Path rooted to the Cargo.toml location if it's a file. + let abi_or_path = input.parse::<LitStr>()?; + + #[allow(clippy::collapsible_else_if)] + let abi = if abi_or_path.value().ends_with(".json") { + let json_path = if abi_or_path.value().starts_with(CARGO_MANIFEST_DIR) { + let manifest_dir = env!("CARGO_MANIFEST_DIR"); + let new_dir = Path::new(manifest_dir) + .join(abi_or_path.value().trim_start_matches(CARGO_MANIFEST_DIR)) + .to_string_lossy() + .to_string(); + + LitStr::new(&new_dir, proc_macro2::Span::call_site()) + } else { + abi_or_path + }; + + serde_json::from_reader::<_, Vec<RawLegacyAbiEntry>>(open_json_file( + &json_path.value(), + )?) + .map_err(|e| syn::Error::new(json_path.span(), format!("JSON parse error: {}", e)))? + } else { + serde_json::from_str::<Vec<RawLegacyAbiEntry>>(&abi_or_path.value()).map_err(|e| { + syn::Error::new(abi_or_path.span(), format!("JSON parse error: {}", e)) + })? + }; + + let mut output_path: Option<String> = None; + let mut type_aliases = HashMap::new(); + + loop { + if input.parse::<Token![,]>().is_err() { + break; + } + + let name = match Ident::parse_any(input) { + Ok(n) => n, + Err(_) => break, + }; + + match name.to_string().as_str() { + "type_aliases" => { + let content; + braced!(content in input); + let parsed = + content.parse_terminated(Spanned::<TypeAlias>::parse, Token![;])?; + + let mut abi_types = HashSet::new(); + let mut aliases = HashSet::new(); + + for type_alias in parsed { + if !abi_types.insert(type_alias.abi.clone()) { + panic!("{} duplicate abi type", type_alias.abi) + } + if !aliases.insert(type_alias.alias.clone()) { + panic!("{} duplicate alias name", type_alias.alias) + } + + let ta = type_alias.into_inner(); + type_aliases.insert(ta.abi, ta.alias); + } + } + "output_path" => { + let content; + parenthesized!(content in input); + output_path = Some(content.parse::<LitStr>()?.value()); + } + _ => panic!("unexpected named parameter `{}`", name), + } + } + + Ok(ContractAbiLegacy { + name, + abi, + output_path, + type_aliases, + }) + } +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub(crate) struct TypeAlias { + abi: String, + alias: String, +} + +impl Parse for TypeAlias { + fn parse(input: ParseStream) -> Result<Self> { + let abi = input + .parse::<Type>()?
+ .into_token_stream() + .to_string() + .replace(' ', ""); + + input.parse::<Token![as]>()?; + + let alias = input.parse::<Ident>()?.to_string(); + + Ok(TypeAlias { abi, alias }) + } +} + +fn open_json_file(file_path: &str) -> Result<File> { + File::open(file_path).map_err(|e| { + syn::Error::new( + str_to_litstr(file_path).span(), + format!("JSON open file {} error: {}", file_path, e), + ) + }) +} + +/// +pub fn str_to_litstr(str_in: &str) -> LitStr { + LitStr::new(str_in, proc_macro::Span::call_site().into()) +}
diff --git a/crates/rs/src/expand/function.rs b/crates/rs/src/expand/function.rs index 2d9f018..821a888 100644 --- a/crates/rs/src/expand/function.rs +++ b/crates/rs/src/expand/function.rs @@ -19,7 +19,7 @@ //! ```ignore (pseudo-code) //! // TODO //! ``` -use cainome_parser::tokens::{Function, StateMutability, Token}; +use cainome_parser::tokens::{Function, FunctionOutputKind, StateMutability, Token}; use proc_macro2::TokenStream as TokenStream2; use quote::quote; @@ -60,14 +60,16 @@ impl CairoFunction { serializations.push(ser); } - let out_type = if func.outputs.is_empty() { - quote!(()) - } else { - // We consider only one type for Cairo 1, if any. - // The outputs field is a list for historical reason from Cairo 0 - were tuples were used as returned values. - let out_type = utils::str_to_type(&func.outputs[0].to_rust_type_path()); - quote!(#out_type) + let out_type = match func.get_output_kind() { + FunctionOutputKind::NoOutput => quote!(()), + FunctionOutputKind::Cairo1 => { + let out_type = utils::str_to_type(&func.outputs[0].to_rust_type_path()); + quote!(#out_type) + } + FunctionOutputKind::Cairo0 => { + let out_type = utils::str_to_type(&func.get_cairo0_output_name()); + quote!(#out_type) + } }; let inputs = get_func_inputs(&func.inputs);
diff --git a/crates/rs/src/expand/struct.rs b/crates/rs/src/expand/struct.rs index ab66000..8c49241 100644 --- a/crates/rs/src/expand/struct.rs +++ b/crates/rs/src/expand/struct.rs @@ -21,7 +21,14 @@ impl CairoStruct { let name = utils::str_to_ident(&inner.name); let ty = utils::str_to_type(&inner.token.to_rust_type()); - members.push(quote!(#name: #ty)); + // r#{name} is not a valid identifier, thus we can't create an ident. + // And with proc macro 2, we cannot do `quote!(r##name)`. + // TODO: this needs to be done more elegantly... + if &inner.name == "type" { + members.push(quote!(r#type: #ty)); + } else { + members.push(quote!(#name: #ty)); + } } if composite.is_generic() { @@ -67,8 +74,6 @@ impl CairoStruct { for inner in &composite.inners { let name = utils::str_to_ident(&inner.name); - names.push(quote!(#name)); - let ty = utils::str_to_type(&inner.token.to_rust_type_path()); // Tuples type used as rust type path item path must be surrounded @@ -78,16 +83,36 @@ impl CairoStruct { _ => quote!(#ty), }; - sizes.push(quote! { - __size += #ty_punctuated::cairo_serialized_size(&__rust.#name); - }); + // r#{name} is not a valid identifier, thus we can't create an ident. + // And with proc macro 2, we cannot do `quote!(r##name)`. + // TODO: this needs to be done more elegantly... + if &inner.name == "type" { + names.push(quote!(r#type)); + + sizes.push(quote! { + __size += #ty_punctuated::cairo_serialized_size(&__rust.r#type); + }); + + sers.push(quote!(__out.extend(#ty_punctuated::cairo_serialize(&__rust.r#type));)); - sers.push(quote!(__out.extend(#ty_punctuated::cairo_serialize(&__rust.#name));)); + desers.push(quote!
{ + let r#type = #ty_punctuated::cairo_deserialize(__felts, __offset)?; + __offset += #ty_punctuated::cairo_serialized_size(&r#type); + }); + } else { + names.push(quote!(#name)); - desers.push(quote! { - let #name = #ty_punctuated::cairo_deserialize(__felts, __offset)?; - __offset += #ty_punctuated::cairo_serialized_size(&#name); - }); + sizes.push(quote! { + __size += #ty_punctuated::cairo_serialized_size(&__rust.#name); + }); + + sers.push(quote!(__out.extend(#ty_punctuated::cairo_serialize(&__rust.#name));)); + + desers.push(quote! { + let #name = #ty_punctuated::cairo_deserialize(__felts, __offset)?; + __offset += #ty_punctuated::cairo_serialized_size(&#name); + }); + } } let ccs = utils::cainome_cairo_serde(); diff --git a/crates/rs/src/expand/types.rs b/crates/rs/src/expand/types.rs index 755a6a4..969a194 100644 --- a/crates/rs/src/expand/types.rs +++ b/crates/rs/src/expand/types.rs @@ -12,7 +12,14 @@ impl CairoToRust for Token { fn to_rust_type(&self) -> String { match self { Token::CoreBasic(t) => basic_types_to_rust(&t.type_name()), - Token::Array(t) => format!("Vec<{}>", t.inner.to_rust_type()), + Token::Array(t) => { + if t.is_legacy { + let ccsp = utils::cainome_cairo_serde_path(); + format!("{}::CairoArrayLegacy<{}>", ccsp, t.inner.to_rust_type()) + } else { + format!("Vec<{}>", t.inner.to_rust_type()) + } + } Token::Tuple(t) => { let mut s = String::from("("); @@ -56,7 +63,18 @@ impl CairoToRust for Token { fn to_rust_type_path(&self) -> String { match self { Token::CoreBasic(t) => basic_types_to_rust(&t.type_name()), - Token::Array(t) => format!("Vec::<{}>", t.inner.to_rust_type_path()), + Token::Array(t) => { + if t.is_legacy { + let ccsp = utils::cainome_cairo_serde_path(); + format!( + "{}::CairoArrayLegacy::<{}>", + ccsp, + t.inner.to_rust_type_path() + ) + } else { + format!("Vec::<{}>", t.inner.to_rust_type_path()) + } + } Token::Tuple(t) => { let mut s = String::from("("); for (idx, inner) in t.inners.iter().enumerate() { @@ -104,6 +122,7 @@ fn basic_types_to_rust(type_name: &str) -> String { "ContractAddress" => format!("{ccsp}::ContractAddress"), "EthAddress" => format!("{ccsp}::EthAddress"), "felt252" => "starknet::core::types::FieldElement".to_string(), + "felt" => "starknet::core::types::FieldElement".to_string(), "bytes31" => format!("{ccsp}::Bytes31"), "ByteArray" => format!("{ccsp}::ByteArray"), _ => type_name.to_string(), diff --git a/examples/cairo0.rs b/examples/cairo0.rs new file mode 100644 index 0000000..419cbb2 --- /dev/null +++ b/examples/cairo0.rs @@ -0,0 +1,6 @@ +use cainome::rs::abigen_legacy; + +abigen_legacy!(MyContract, "./contracts/cairo0/kkrt.abi.json",); + +#[tokio::main] +async fn main() {} diff --git a/examples/cairo0_account.rs b/examples/cairo0_account.rs new file mode 100644 index 0000000..aee950c --- /dev/null +++ b/examples/cairo0_account.rs @@ -0,0 +1,6 @@ +use cainome::rs::abigen_legacy; + +abigen_legacy!(MyContract, "./contracts/cairo0/oz0.abi.json",); + +#[tokio::main] +async fn main() {}
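The two new examples compile the legacy bindings but leave `main` empty. A fleshed-out sketch (hypothetical: the node URL, contract address, and the `MyContractReader`/`.call()` pattern are assumptions carried over from the existing Cairo 1 examples) might look like:

```rust
use cainome::rs::abigen_legacy;
use starknet::core::types::FieldElement;
use starknet::providers::{jsonrpc::HttpTransport, JsonRpcClient};
use url::Url;

abigen_legacy!(MyContract, "./contracts/cairo0/kkrt.abi.json");

#[tokio::main]
async fn main() {
    // Hypothetical devnet endpoint and contract address.
    let provider = JsonRpcClient::new(HttpTransport::new(
        Url::parse("http://0.0.0.0:5050").expect("invalid node URL"),
    ));
    let contract_address = FieldElement::from_hex_be("0x1234").expect("invalid address");

    // Assumes the generated reader follows the same pattern as the Cairo 1 bindings.
    let kkrt_reader = MyContractReader::new(contract_address, &provider);

    let native_token = kkrt_reader
        .get_native_token()
        .call()
        .await
        .expect("get_native_token failed");

    println!("native token: {:#x}", native_token.native_token_address);
}
```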