extern crate proc_macro;

use std::str::FromStr;

use proc_macro::{Literal, TokenStream, TokenTree};
use proc_macro2::TokenStream as TokenStream2;
use proc_macro_roids::{DeriveInputStructExt, FieldExt};
use quote::quote;
use syn::{parse_macro_input, parse_quote, DeriveInput};
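
/// Derives `::eatgel::ShaderData` for a plain named-field struct.
///
/// The generated `init` passes the field names to `register_uniforms`, and
/// `apply` hands each field to `update_uniform_by_index` under its position
/// in that list. Fields annotated with `#[skip]` are left out. The input is
/// scanned token-by-token, so generic and tuple structs are not supported.
///
/// A minimal usage sketch (the field types are hypothetical; anything the
/// eatgel uniform machinery accepts works):
///
/// ```ignore
/// #[derive(ShaderData)]
/// struct Uniforms {
///     time: f32,
///     #[skip]
///     dirty: bool,
/// }
/// ```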
#[proc_macro_derive(ShaderData, attributes(skip))]
pub fn derive_shader_data(token_stream: TokenStream) -> TokenStream {
    let mut iter = token_stream.into_iter();
    let empty = TokenStream::new();

    // Scan past any leading attributes and visibility qualifiers until the
    // item keyword is reached; generate nothing if the input is not a struct.
    loop {
        match iter.next() {
            Some(TokenTree::Ident(ident)) if ident.to_string() == "struct" => break,
            Some(_) => continue,
            None => return empty,
        }
    }

    let struct_name = if let Some(TokenTree::Ident(ident)) = iter.next() {
        ident.to_string()
    } else {
        return empty;
    };

    // The braced field list. Generic parameters are not handled by this
    // hand-rolled scanner, so anything other than `{ ... }` bails out.
    let mut group = match iter.next() {
        Some(TokenTree::Group(group)) => group.stream().into_iter(),
        _ => return empty,
    };
    // Collect the field names, honouring `#[skip]` markers.
    let mut keys = vec![];
    while let Some(item) = group.next() {
        if item.to_string() == "#" {
            // A field attribute: `#` followed by a bracketed group.
            if let Some(TokenTree::Group(tag)) = group.next() {
                if tag.to_string() == "[skip]" {
                    // Drop the whole `#[skip]`-annotated field by consuming
                    // tokens up to the next `,`. A comma inside the field's
                    // type (e.g. a multi-parameter generic) would cut this
                    // scan short.
                    while let Some(x) = group.next() {
                        if x.to_string() == "," {
                            break;
                        }
                    }
                }
                continue;
            } else {
                break;
            }
        }
        let name = match item {
            TokenTree::Ident(ident) => ident.to_string(),
            _ => break,
        };
        // Skip the `:` and the type tokens up to the next `,`.
        group.next();
        while let Some(x) = group.next() {
            if x.to_string() == "," {
                break;
            }
        }
        keys.push(name);
    }
    // `init` registers every field name as a uniform, in declaration order.
    let init = keys
        .iter()
        .map(|name| Literal::string(name).to_string())
        .collect::<Vec<String>>()
        .join(", ");

    // `apply` uploads each field under the index it was registered with.
    let apply = keys
        .iter()
        .enumerate()
        .map(|(index, name)| {
            format!(
                "(gl as &dyn UpdateUniform<_>).update_uniform_by_index(gl, program_id, {}, &self.{});",
                index, name
            )
        })
        .collect::<Vec<String>>()
        .join("\n");

    let gen_source = format!(
        r#"
impl ::eatgel::ShaderData for {} {{
    fn init(&mut self, gl: &mut ::eatgel::GlContext, program_id: u32) {{
        gl.register_uniforms(program_id, &[{}]);
    }}
    fn apply(&self, gl: &::eatgel::GlContext, program_id: u32) {{
        {}
    }}
}}
"#,
        struct_name, init, apply,
    );

    TokenStream::from_str(&gen_source).unwrap()
}
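
/// Derives `::eatgel::VertexData` for a struct.
///
/// The generated `get_sizes`, `get_types`, and `get_pointers` report, per
/// field, `size_of` of the field's type, `::eatgel::type_descriptor_of` for
/// that type, and the field's `as_ptr()` cast to `*const c_void`.
/// `PhantomData` fields and fields tagged for skipping are excluded.
///
/// A minimal usage sketch; `VertexBuffer` is a hypothetical container type
/// standing in for whatever per-attribute storage exposes `as_ptr()`:
///
/// ```ignore
/// #[derive(VertexData)]
/// struct Mesh {
///     positions: VertexBuffer,
///     uvs: VertexBuffer,
/// }
/// ```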
#[proc_macro_derive(VertexData, attributes(vertex_data, skip))]
pub fn derive_vertex_data(token_stream: TokenStream) -> TokenStream {
    let ast = parse_macro_input!(token_stream as DeriveInput);
    let name = &ast.ident;

    // Pair each field with its declaration index (needed for tuple structs),
    // then drop `PhantomData` fields and fields tagged `#[vertex_data(skip)]`.
    let fields = ast
        .fields()
        .iter()
        .enumerate()
        .filter(|(_, field)| !field.is_phantom_data())
        .filter(|(_, field)| !field.contains_tag(&parse_quote!(vertex_data), &parse_quote!(skip)));
    // One `size_of` expression per remaining field.
    let sizes = fields
        .clone()
        .map(|(_, field)| {
            let type_name = field.type_name();
            quote!(::std::mem::size_of::<#type_name>())
        })
        .collect::<Vec<_>>();

    // One type descriptor per remaining field.
    let types = fields
        .clone()
        .map(|(_, field)| {
            let type_name = field.type_name();
            quote!(::eatgel::type_descriptor_of::<#type_name>())
        })
        .collect::<Vec<_>>();

    // One data pointer per remaining field.
    let pointers = fields
        .map(|(index, field)| {
            if let Some(name) = field.ident.as_ref() {
                quote!(self.#name.as_ptr() as *const ::std::ffi::c_void)
            } else {
                // Tuple struct: interpolate the position as an unsuffixed
                // literal so the expansion reads `self.0`, not `self.0usize`.
                let index = syn::Index::from(index);
                quote!(self.#index.as_ptr() as *const ::std::ffi::c_void)
            }
        })
        .collect::<Vec<_>>();
    let token_stream2: TokenStream2 = quote!(
        impl ::eatgel::VertexData for #name {
            fn get_sizes() -> Box<[usize]> {
                vec![#(#sizes,)*].into_boxed_slice()
            }
            fn get_types() -> Box<[::eatgel::TypeDescriptor]> {
                vec![#(#types,)*].into_boxed_slice()
            }
            fn get_pointers(&self) -> Box<[*const ::std::ffi::c_void]> {
                vec![#(#pointers,)*].into_boxed_slice()
            }
        }
    );
    token_stream2.into()
}