Add docs feature #101

Merged · 4 commits · Jun 23, 2021
1 change: 1 addition & 0 deletions .github/workflows/rust.yml
@@ -40,6 +40,7 @@ jobs:
- name: check-features
run: |
cargo check --no-default-features --features bit-vec
+cargo check --no-default-features --features docs
cargo check --no-default-features --features serde
cargo check --no-default-features --features serde,decode

6 changes: 5 additions & 1 deletion Cargo.toml
@@ -22,14 +22,18 @@ derive_more = { version = "0.99.1", default-features = false, features = ["from"
scale = { package = "parity-scale-codec", version = "2.1", default-features = false, features = ["derive"] }

[features]
default = ["std"]
default = ["std", "docs"]
std = [
"bitvec/std",
"scale/std",
]
derive = [
"scale-info-derive"
]
+# Include rustdoc strings in the type metadata.
+docs = [
+"scale-info-derive/docs"
+]
# enables decoding and deserialization of portable scale-info type metadata
decode = [
"scale/full"
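A quick illustration (not part of this diff) of what the new `docs` feature gates: with it enabled, rustdoc comments on a deriving type end up in the generated type metadata, while building with `--no-default-features` leaves them out. The `Point` type below is made up for the example, and it assumes the `derive` feature is on and that `Type` exposes a `docs()` getter, as the builder calls in this PR suggest.

```rust
// Illustrative sketch only; not taken from the PR.
use scale_info::TypeInfo;

/// A point in two-dimensional space.
#[derive(TypeInfo)]
pub struct Point {
    /// Distance from the origin along the x axis.
    pub x: u64,
}

fn main() {
    let ty = Point::type_info();
    // With the `docs` feature enabled this prints the rustdoc strings above;
    // with the feature disabled, the docs slice is empty.
    println!("{:?}", ty.docs());
}
```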
5 changes: 5 additions & 0 deletions derive/Cargo.toml
@@ -19,3 +19,8 @@ quote = "1.0"
syn = { version = "1.0", features = ["derive", "visit", "visit-mut", "extra-traits"] }
proc-macro2 = "1.0"
proc-macro-crate = "1"

+[features]
+default = ["docs"]
+# Include code docs in type metadata.
+docs = []
48 changes: 40 additions & 8 deletions derive/src/lib.rs
@@ -104,7 +104,7 @@ fn generate_type(input: TokenStream2) -> Result<TokenStream2> {
Data::Enum(ref e) => generate_variant_type(e, &scale_info),
Data::Union(_) => return Err(Error::new_spanned(input, "Unions not supported")),
};
-let docs = utils::get_doc_literals(&ast.attrs);
+let docs = generate_docs(&ast.attrs);

let type_info_impl = quote! {
impl #impl_generics :: #scale_info ::TypeInfo for #ident #ty_generics #where_clause {
@@ -113,7 +113,7 @@ fn generate_type(input: TokenStream2) -> Result<TokenStream2> {
:: #scale_info ::Type::builder()
.path(:: #scale_info ::Path::new(::core::stringify!(#ident), ::core::module_path!()))
.type_params(:: #scale_info ::prelude::vec![ #( #generic_type_ids ),* ])
-.docs(&[ #( #docs ),* ])
+#docs
.#build_type
}
}
@@ -160,7 +160,7 @@ fn generate_fields(fields: &FieldsList) -> Vec<TokenStream2> {
StaticLifetimesReplace.visit_type_mut(&mut ty);

let type_name = clean_type_string(&quote!(#ty).to_string());
-let docs = utils::get_doc_literals(&f.attrs);
+let docs = generate_docs(&f.attrs);
let type_of_method = if utils::is_compact(f) {
quote!(compact)
} else {
@@ -176,7 +176,7 @@ fn generate_fields(fields: &FieldsList) -> Vec<TokenStream2> {
.#type_of_method::<#ty>()
#name
.type_name(#type_name)
-.docs(&[ #( #docs ),* ])
+#docs
)
)
})
@@ -235,12 +235,12 @@ fn generate_c_like_enum_def(variants: &VariantList, scale_info: &Ident) -> Token
.map(|(i, v)| {
let name = &v.ident;
let discriminant = utils::variant_index(v, i);
-let docs = utils::get_doc_literals(&v.attrs);
+let docs = generate_docs(&v.attrs);
quote! {
.variant(::core::stringify!(#name), |v|
v
.discriminant(#discriminant as ::core::primitive::u64)
-.docs(&[ #( #docs ),* ])
+#docs
)
}
});
@@ -272,7 +272,7 @@ fn generate_variant_type(data_enum: &DataEnum, scale_info: &Ident) -> TokenStrea
.map(|v| {
let ident = &v.ident;
let v_name = quote! {::core::stringify!(#ident) };
-let docs = utils::get_doc_literals(&v.attrs);
+let docs = generate_docs(&v.attrs);
let index = utils::maybe_index(v).map(|i| quote!(.index(#i)));

let fields = match v.fields {
@@ -301,7 +301,7 @@ fn generate_variant_type(data_enum: &DataEnum, scale_info: &Ident) -> TokenStrea
.variant(#v_name, |v|
v
.fields(#fields)
-.docs(&[ #( #docs ),* ])
+#docs
#index
)
}
@@ -313,3 +313,35 @@ fn generate_variant_type(data_enum: &DataEnum, scale_info: &Ident) -> TokenStrea
)
}
}

#[cfg(feature = "docs")]
fn generate_docs(attrs: &[syn::Attribute]) -> Option<TokenStream2> {
let docs = attrs
.iter()
.filter_map(|attr| {
if let Ok(syn::Meta::NameValue(meta)) = attr.parse_meta() {
if meta.path.get_ident().map_or(false, |ident| ident == "doc") {
let lit = &meta.lit;
let doc_lit = quote!(#lit).to_string();
let trimmed_doc_lit =
doc_lit.trim_start_matches(r#"" "#).trim_end_matches('"');
let lit: syn::Lit = parse_quote!(#trimmed_doc_lit);
Some(lit)
} else {
None
}
} else {
None
}
})
.collect::<Vec<_>>();

Some(quote! {
.docs(&[ #( #docs ),* ])
})
}

#[cfg(not(feature = "docs"))]
fn generate_docs(_: &[syn::Attribute]) -> Option<TokenStream2> {
None
}
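The trimming in `generate_docs` is easy to sanity-check: `/// text` desugars to `#[doc = " text"]`, and the function strips the surrounding quotes plus the leading space before re-parsing the string as a literal. The following is a hypothetical unit test, not included in this PR, that could sit inside derive/src/lib.rs; the module and test names are made up, and it uses only `syn` and `quote`, which the crate already depends on.

```rust
// Hypothetical test (not part of this PR): a `#[doc = " ..."]` attribute
// should become a `.docs(&["..."])` call with the leading space stripped.
#[cfg(all(test, feature = "docs"))]
mod docs_tests {
    use super::generate_docs;
    use quote::quote;
    use syn::parse_quote;

    #[test]
    fn doc_attribute_becomes_docs_call() {
        let attrs: Vec<syn::Attribute> = vec![parse_quote!(#[doc = " A documented type."])];
        let tokens = generate_docs(&attrs).expect("docs feature is enabled");
        assert_eq!(
            tokens.to_string(),
            quote!(.docs(&["A documented type."])).to_string()
        );
    }
}
```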
27 changes: 0 additions & 27 deletions derive/src/utils.rs
@@ -16,15 +16,10 @@
//!
//! NOTE: The code here is copied verbatim from `parity-scale-codec-derive`.

-use alloc::{
-    string::ToString,
-    vec::Vec,
-};
use proc_macro2::TokenStream;
use quote::quote;
use syn::{
parse::Parse,
-parse_quote,
punctuated::Punctuated,
spanned::Spanned,
token,
@@ -37,28 +37,6 @@ use syn::{
Variant,
};

-/// Return all doc attributes literals found.
-pub fn get_doc_literals(attrs: &[syn::Attribute]) -> Vec<syn::Lit> {
-    attrs
-        .iter()
-        .filter_map(|attr| {
-            if let Ok(syn::Meta::NameValue(meta)) = attr.parse_meta() {
-                if meta.path.get_ident().map_or(false, |ident| ident == "doc") {
-                    let lit = &meta.lit;
-                    let doc_lit = quote!(#lit).to_string();
-                    let trimmed_doc_lit =
-                        doc_lit.trim_start_matches(r#"" "#).trim_end_matches('"');
-                    Some(parse_quote!(#trimmed_doc_lit))
-                } else {
-                    None
-                }
-            } else {
-                None
-            }
-        })
-        .collect()
-}

/// Trait bounds.
pub type TraitBounds = Punctuated<syn::WherePredicate, token::Comma>;
