feat: add typeshare support #38

Open · wants to merge 2 commits into base: main
12 changes: 10 additions & 2 deletions crates/rs-macro/src/lib.rs
@@ -29,7 +29,12 @@ fn abigen_internal(input: TokenStream) -> TokenStream {
let abi_tokens = AbiParser::collect_tokens(&abi_entries, &contract_abi.type_aliases)
.expect("failed tokens parsing");

let expanded = cainome_rs::abi_to_tokenstream(&contract_name.to_string(), &abi_tokens);
// Typeshare cannot parse the expanded code when the macro runs at compile time
// and the output is not written to a file.
let add_typeshare = false;

let expanded =
cainome_rs::abi_to_tokenstream(&contract_name.to_string(), &abi_tokens, add_typeshare);

if let Some(out_path) = contract_abi.output_path {
let content: String = expanded.to_string();
@@ -53,7 +58,10 @@ fn abigen_internal_legacy(input: TokenStream) -> TokenStream {
let abi_tokens = AbiParserLegacy::collect_tokens(&abi_entries, &contract_abi.type_aliases)
.expect("failed tokens parsing");

let expanded = cainome_rs::abi_to_tokenstream(&contract_name.to_string(), &abi_tokens);
let add_typeshare = false;

let expanded =
cainome_rs::abi_to_tokenstream(&contract_name.to_string(), &abi_tokens, add_typeshare);

if let Some(out_path) = contract_abi.output_path {
let content: String = expanded.to_string();
24 changes: 22 additions & 2 deletions crates/rs/src/expand/enum.rs
@@ -9,7 +9,7 @@ use crate::expand::utils;
pub struct CairoEnum;

impl CairoEnum {
pub fn expand_decl(composite: &Composite) -> TokenStream2 {
pub fn expand_decl(composite: &Composite, add_typeshare: bool) -> TokenStream2 {
if composite.is_builtin() {
return quote!();
}
@@ -18,6 +18,8 @@ impl CairoEnum {

let mut variants: Vec<TokenStream2> = vec![];

let mut is_algebraic = false;

for inner in &composite.inners {
let name = utils::str_to_ident(&inner.name);
let ty = utils::str_to_type(&inner.token.to_rust_type());
@@ -26,10 +28,11 @@ impl CairoEnum {
variants.push(quote!(#name));
} else {
variants.push(quote!(#name(#ty)));
is_algebraic = true;
}
}

if composite.is_generic() {
let decl = if composite.is_generic() {
let gen_args: Vec<Ident> = composite
.generic_args
.iter()
@@ -58,6 +61,23 @@
#(#variants),*
}
}
};

if add_typeshare {
if is_algebraic {
quote! {
#[typeshare]
#[serde(tag = "type", content = "content")]
#decl
}
} else {
quote! {
#[typeshare]
#decl
}
}
} else {
decl
}
}

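For context on the two branches added above, here is a minimal sketch of the shape the expansion would produce when add_typeshare is true. The enum names, variants, and the serde/typeshare imports and derives below are illustrative assumptions, not taken from this diff:

use serde::{Deserialize, Serialize};
use typeshare::typeshare;

// Algebraic case: at least one variant carries data, so the expansion adds
// adjacent tagging next to the #[typeshare] marker.
#[typeshare]
#[serde(tag = "type", content = "content")]
#[derive(Serialize, Deserialize)]
pub enum MyEvent {
    Pending,
    Written(u64),
}

// Unit-only case: no data-carrying variant, so only #[typeshare] is prepended.
#[typeshare]
#[derive(Serialize, Deserialize)]
pub enum MyStatus {
    Active,
    Inactive,
}

With adjacent tagging, a data-carrying variant serializes as {"type": "Written", "content": 42}, which is the representation typeshare generally expects for enums with associated data.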
13 changes: 11 additions & 2 deletions crates/rs/src/expand/struct.rs
@@ -9,7 +9,7 @@ use crate::expand::utils;
pub struct CairoStruct;

impl CairoStruct {
pub fn expand_decl(composite: &Composite) -> TokenStream2 {
pub fn expand_decl(composite: &Composite, add_typeshare: bool) -> TokenStream2 {
if composite.is_builtin() {
return quote!();
}
@@ -35,7 +35,7 @@ impl CairoStruct {
}
}

if composite.is_generic() {
let decl = if composite.is_generic() {
let gen_args: Vec<Ident> = composite
.generic_args
.iter()
@@ -64,6 +64,15 @@
#(pub #members),*
}
}
};

if add_typeshare {
quote! {
#[typeshare]
#decl
}
} else {
decl
}
}

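The struct path is simpler: when add_typeshare is true, only the #[typeshare] marker is prepended, with no serde tagging. A minimal sketch, with a hypothetical struct and assumed imports and derives:

use serde::{Deserialize, Serialize};
use typeshare::typeshare;

// Only the #[typeshare] marker is added in front of the declaration.
#[typeshare]
#[derive(Serialize, Deserialize)]
pub struct MyRecord {
    pub id: u64,
    pub owner: String,
}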
22 changes: 18 additions & 4 deletions crates/rs/src/lib.rs
@@ -63,6 +63,8 @@ pub struct Abigen {
/// Types aliases to avoid name conflicts, as for now the types are limited to the
/// latest segment of the fully qualified path.
pub types_aliases: HashMap<String, String>,
/// Adds the typeshare attribute to the generated types.
pub add_typeshare: bool,
}

impl Abigen {
@@ -78,6 +80,7 @@ impl Abigen {
contract_name: contract_name.to_string(),
abi_source: Utf8PathBuf::from(abi_source),
types_aliases: HashMap::new(),
add_typeshare: false,
}
}

@@ -91,13 +94,19 @@
self
}

/// Adds the typeshare attribute to the generated types.
pub fn with_typeshare(mut self) -> Self {
self.add_typeshare = true;
self
}

/// Generates the contract bindings.
pub fn generate(&self) -> Result<ContractBindings> {
let file_content = std::fs::read_to_string(&self.abi_source)?;

match AbiParser::tokens_from_abi_string(&file_content, &self.types_aliases) {
Ok(tokens) => {
let expanded = abi_to_tokenstream(&self.contract_name, &tokens);
let expanded = abi_to_tokenstream(&self.contract_name, &tokens, self.add_typeshare);

Ok(ContractBindings {
name: self.contract_name.clone(),
@@ -120,7 +129,12 @@ impl Abigen {
///
/// * `contract_name` - Name of the contract.
/// * `abi_tokens` - Tokenized ABI.
pub fn abi_to_tokenstream(contract_name: &str, abi_tokens: &TokenizedAbi) -> TokenStream2 {
/// * `add_typeshare` - Adds the typeshare attribute to the generated types.
pub fn abi_to_tokenstream(
contract_name: &str,
abi_tokens: &TokenizedAbi,
add_typeshare: bool,
) -> TokenStream2 {
let contract_name = utils::str_to_ident(contract_name);

let mut tokens: Vec<TokenStream2> = vec![];
@@ -129,13 +143,13 @@ pub fn abi_to_tokenstream(contract_name: &str, abi_tokens: &TokenizedAbi) -> Tok

for s in &abi_tokens.structs {
let s_composite = s.to_composite().expect("composite expected");
tokens.push(CairoStruct::expand_decl(s_composite));
tokens.push(CairoStruct::expand_decl(s_composite, add_typeshare));
tokens.push(CairoStruct::expand_impl(s_composite));
}

for e in &abi_tokens.enums {
let e_composite = e.to_composite().expect("composite expected");
tokens.push(CairoEnum::expand_decl(e_composite));
tokens.push(CairoEnum::expand_decl(e_composite, add_typeshare));
tokens.push(CairoEnum::expand_impl(e_composite));

tokens.push(CairoEnumEvent::expand(
17 changes: 17 additions & 0 deletions examples/typeshare.rs
@@ -0,0 +1,17 @@
// To run this example:
//
// cargo run --example typeshare --all-features
// Then you can run typeshare command on the generated file.
use cainome::rs::Abigen;

#[tokio::main]
async fn main() {
let abigen =
Abigen::new("MyContract", "./contracts/abi/simple_get_set.abi.json").with_typeshare();

abigen
.generate()
.unwrap()
.write_to_file("/tmp/with_typeshare.rs")
.unwrap();
}
7 changes: 6 additions & 1 deletion src/bin/cli/plugins/builtins/rust.rs
@@ -24,6 +24,7 @@ impl BuiltinPlugin for RustPlugin {
// For now, let's only take the latest part of this path.
// TODO: if a project has several contracts with the same name under different
// namespaces, we should provide a solution to solve those conflicts.
// Check how the aliases can be passed here.
let contract_name = contract
.name
.split("::")
@@ -32,7 +33,11 @@
.from_case(Case::Snake)
.to_case(Case::Pascal);

let expanded = cainome_rs::abi_to_tokenstream(&contract_name, &contract.tokens);
// TODO: add typeshare support?
let add_typeshare = false;

let expanded =
cainome_rs::abi_to_tokenstream(&contract_name, &contract.tokens, add_typeshare);
let filename = format!(
"{}.rs",
contract_name.from_case(Case::Pascal).to_case(Case::Snake)