Guess I should put this under version control LOL

This commit is contained in:
Ryan McGrath 2019-05-23 22:11:07 -07:00
commit 2035318460
No known key found for this signature in database
GPG key ID: 811674B62B666830
73 changed files with 8836 additions and 0 deletions

30
macros/Cargo.toml Normal file
View file

@ -0,0 +1,30 @@
[package]
name = "alchemy-macros"
description = "A crate containing macros used in Alchemy, the Rust cross-platform GUI framework."
version = "0.1.0"
edition = "2018"
authors = ["Ryan McGrath <ryan@rymc.io>"]
# Build script lives with the sources rather than at the package root.
build = "src/build.rs"
license = "MPL-2.0+"
repository = "https://github.com/ryanmcgrath/alchemy"
categories = ["gui", "rendering::engine", "multimedia"]
keywords = ["gui", "css", "styles", "layout", "ui"]

[lib]
# This crate exports procedural macros (rsx! / styles!).
proc-macro = true

[badges]
maintenance = { status = "actively-developed" }

[dependencies]
ansi_term = "0.11.0"
lalrpop-util = "0.16.1"
# "nightly" feature is paired with the cfg flags set by src/build.rs.
proc-macro2 = { version = "0.4.24", features = ["nightly"] }
proc-macro-hack = "0.5.2"
quote = "0.6.10"
alchemy-styles = { version = "0.1", path = "../styles", features = ["parser", "tokenize"] }
syn = "0.15"

[build-dependencies]
# lalrpop generates the parser from src/grammar.lalrpop at build time.
lalrpop = "0.16.1"
version_check = "0.1.5"

8
macros/README.md Normal file
View file

@ -0,0 +1,8 @@
# Alchemy-Macros
This crate holds macros for two things, primarily:
- `rsx! {}`, which transforms `<View></View>` tags into their proper `RSX` calls. Much of this is forked from the awesome work done by [Bodil Stokke in typed-html](https://github.com/bodil/typed-html).
- `styles! {}`, which transforms CSS style nodes into `Vec<Styles>`, which the rendering engine uses to theme and style nodes. This relies on the [CSS Parser from Servo](https://github.com/servo/rust-cssparser). Styles do not support cascading; this is a design decision, as inheritance is already a bit of a taboo in Rust, so to do it in styling code feels really odd and involves a mental shift the deeper you go. Opt to apply successive style keys, conditionally if need be, to achieve the same thing with a compositional approach.
## Questions, Comments?
Open an issue, or hit me up on [Twitter](https://twitter.com/ryanmcgrath/).

11
macros/src/build.rs Normal file
View file

@ -0,0 +1,11 @@
extern crate lalrpop;
extern crate version_check;
/// Build script for alchemy-macros.
///
/// Generates the LALRPOP parser from `src/grammar.lalrpop`, and enables the
/// `can_join_spans` / `can_show_location_of_runtime_parse_error` cfg flags
/// when building on a nightly toolchain (both gate unstable span APIs).
fn main() {
    // Fail the build with a descriptive message instead of a bare unwrap()
    // if the grammar cannot be processed.
    lalrpop::process_root().expect("failed to generate parser from grammar.lalrpop");
    if version_check::is_nightly().unwrap_or(false) {
        println!("cargo:rustc-cfg=can_join_spans");
        println!("cargo:rustc-cfg=can_show_location_of_runtime_parse_error");
    }
}

116
macros/src/error.rs Normal file
View file

@ -0,0 +1,116 @@
use ansi_term::Style;
use lalrpop_util::ParseError::*;
use crate::lexer::Token;
use proc_macro2::{Ident, TokenStream};
use quote::{quote, quote_spanned};
/// The parse error type produced by the LALRPOP-generated RSX grammar.
pub type ParseError = lalrpop_util::ParseError<usize, Token, HtmlParseError>;

/// Errors raised by our own grammar actions (as opposed to lalrpop itself).
#[derive(Debug)]
pub enum HtmlParseError {
    // An element was closed by a tag whose name doesn't match the opening tag.
    TagMismatch { open: Ident, close: Ident },
}
/// Maps a lalrpop token-kind name to a human-friendly description for
/// error messages; unknown names pass through unchanged.
fn pprint_token(token: &str) -> &str {
    match token {
        "BraceGroupToken" => "code block",
        "LiteralToken" => "literal",
        "IdentToken" => "identifier",
        a => a,
    }
}

/// Renders a list of expected token names as a readable list with an "or"
/// before the final alternative, e.g. `"a, b or c"`.
///
/// Returns an empty string for an empty slice instead of panicking (the
/// previous version indexed `tokens[0]` unconditionally).
fn pprint_tokens(tokens: &[String]) -> String {
    let tokens: Vec<&str> = tokens.iter().map(|s| pprint_token(s)).collect();
    match tokens.split_last() {
        None => String::new(),
        Some((only, [])) => (*only).to_string(),
        Some((last, init)) => format!("{} or {}", init.join(", "), last),
    }
}
/// True when the parser's "expected" set is exactly the set of tokens that
/// can begin a child node (`<`, a brace block, or a literal) — i.e. we are in
/// node position and the user probably wrote an unquoted text node.
fn is_in_node_position(tokens: &[String]) -> bool {
    use std::collections::HashSet;
    let node_starters: HashSet<&str> = ["\"<\"", "BraceGroupToken", "LiteralToken"]
        .iter()
        .cloned()
        .collect();
    let seen: HashSet<&str> = tokens.iter().map(String::as_str).collect();
    seen == node_starters
}
/// Turns a lalrpop `ParseError` into a `TokenStream` of `compile_error!`
/// invocations, so macro parse failures surface as rustc diagnostics spanned
/// to the offending token in the macro input where possible.
pub fn parse_error(input: &[Token], error: &ParseError) -> TokenStream {
    match error {
        InvalidToken { location } => {
            // `location` indexes into the lexed token slice; borrow its span
            // so the diagnostic points at the right spot in the invocation.
            let span = input[*location].span();
            quote_spanned! {span=>
                compile_error! { "invalid token" }
            }
        }
        // Ran out of input while the parser still expected more tokens.
        UnrecognizedToken {
            token: None,
            expected,
        } => {
            let msg = format!(
                "unexpected end of macro; missing {}",
                pprint_tokens(&expected)
            );
            quote! {
                compile_error! { #msg }
            }
        }
        // A token appeared that doesn't fit the grammar at this position.
        UnrecognizedToken {
            token: Some((_, token, _)),
            expected,
        } => {
            let span = token.span();
            let error_msg = format!("expected {}", pprint_tokens(&expected));
            let error = quote_spanned! {span=>
                compile_error! { #error_msg }
            };
            let help = if is_in_node_position(expected) && token.is_ident() {
                // special case: you probably meant to quote that text
                let help_msg = format!(
                    "text nodes need to be quoted, eg. {}",
                    Style::new().bold().paint("<p>\"Hello Joe!\"</p>")
                );
                Some(quote_spanned! {span=>
                    compile_error! { #help_msg }
                })
            } else {
                None
            };
            // `#help` is an Option: quote emits nothing for None.
            quote! {{
                #error
                #help
            }}
        }
        ExtraToken {
            token: (_, token, _),
        } => {
            let span = token.span();
            quote_spanned! {span=>
                compile_error! { "superfluous token" }
            }
        }
        // Our own grammar error: mismatched open/close tag names. Emit one
        // error at each tag so both sites are highlighted.
        User {
            error: HtmlParseError::TagMismatch { open, close },
        } => {
            let close_span = close.span();
            let close_msg = format!("expected closing tag '</{}>', found '</{}>'", open, close);
            let close_error = quote_spanned! {close_span=>
                compile_error! { #close_msg }
            };
            let open_span = open.span();
            let open_error = quote_spanned! {open_span=>
                compile_error! { "unclosed tag" }
            };
            quote! {{
                #close_error
                #open_error
            }}
        }
    }
}

303
macros/src/grammar.lalrpop Normal file
View file

@ -0,0 +1,303 @@
use crate::lexer::{self, Token, to_stream};
use crate::error::HtmlParseError;
use crate::rsx::{Node, Element};
//use crate::declare::Declare;
use crate::map::StringyMap;
use proc_macro2::{Delimiter, Ident, Literal, Group, TokenTree};
use lalrpop_util::ParseError;
use crate::span;
grammar;
/// Match a B separated list of zero or more A, return a list of A.
Separated<A, B>: Vec<A> = {
    <v:(<A> B)*> <e:A?> => match e {
        None => v,
        Some(e) => {
            let mut v = v;
            v.push(e);
            v
        }
    }
}

/// Match a B separated list of one or more A, return a list of tokens, including the Bs.
/// Both A and B must resolve to a Token.
SeparatedInc<A, B>: Vec<Token> = {
    <v:(A B)*> <e:A> => {
        let mut out = Vec::new();
        for (a, b) in v {
            out.push(a);
            out.push(b);
        }
        out.push(e);
        out
    }
}

// Unwrap the lexer's Token::Ident into a plain proc_macro2 Ident.
Ident: Ident = IdentToken => {
    match <> {
        Token::Ident(ident) => ident,
        _ => unreachable!()
    }
};

// Unwrap the lexer's Token::Literal into a plain proc_macro2 Literal.
Literal: Literal = LiteralToken => {
    match <> {
        Token::Literal(literal) => literal,
        _ => unreachable!()
    }
};

// Any opaque delimiter group accepted as an attribute value.
GroupToken = {
    BraceGroupToken,
    BracketGroupToken,
    ParenGroupToken,
};
/// A kebab case HTML ident, converted to a snake case ident.
HtmlIdent: Ident = {
    <init:(<Ident> "-")*> <last:Ident> => {
        let mut init = init;
        init.push(last);
        // Fold the segments into one snake_case name, joining spans when the
        // `can_join_spans` cfg (nightly) is set; otherwise keep the first span.
        let (span, name) = init.into_iter().fold((None, String::new()), |(span, name), token| {
            (
                match span {
                    None => Some(token.span().unstable()),
                    Some(span) => {
                        #[cfg(can_join_spans)]
                        {
                            span.join(token.span().unstable())
                        }
                        #[cfg(not(can_join_spans))]
                        {
                            Some(span)
                        }
                    }
                },
                if name.is_empty() {
                    name + &token.to_string()
                } else {
                    name + "_" + &token.to_string()
                }
            )
        });
        Ident::new(&name, span::from_unstable(span.unwrap()))
    }
};
// The HTML macro

/// An approximation of a Rust expression.
BareExpression: Token = "&"? (IdentToken ":" ":")* SeparatedInc<IdentToken, "."> ParenGroupToken? => {
    let (reference, left, right, args) = (<>);
    let mut out = Vec::new();
    if let Some(reference) = reference {
        out.push(reference);
    }
    // Re-emit any leading `path::segments` verbatim.
    for (ident, c1, c2) in left {
        out.push(ident);
        out.push(c1);
        out.push(c2);
    }
    out.extend(right);
    if let Some(args) = args {
        out.push(args);
    }
    // Wrap the reassembled expression in braces so it becomes one opaque token.
    Group::new(Delimiter::Brace, to_stream(out)).into()
};

AttrValue: Token = {
    LiteralToken,
    GroupToken,
    BareExpression,
};

// One `name=value` attribute.
Attr: (Ident, Token) = <name:HtmlIdent> "=" <value:AttrValue> => (name, value);

Attrs: StringyMap<Ident, TokenTree> = Attr* => <>.into();

OpeningTag: (Ident, StringyMap<Ident, TokenTree>) = "<" <HtmlIdent> <Attrs> ">";

ClosingTag: Ident = "<" "/" <HtmlIdent> ">";

// Self-closing form: `<foo attr=... />`.
SingleTag: Element = "<" <name:HtmlIdent> <attributes:Attrs> "/" ">" => {
    Element {
        name,
        attributes,
        children: Vec::new(),
    }
};

// Open/children/close form; fails with TagMismatch when the names differ.
ParentTag: Element = <opening:OpeningTag> <children:Node*> <closing:ClosingTag> =>? {
    let (name, attributes) = opening;
    let closing_name = closing.to_string();
    if closing_name == name.to_string() {
        Ok(Element {
            name,
            attributes,
            children,
        })
    } else {
        Err(ParseError::User { error: HtmlParseError::TagMismatch {
            open: name.into(),
            close: closing.into(),
        }})
    }
};

Element = {
    SingleTag,
    ParentTag,
};

TextNode = Literal;

// A `{ .. }` code block child.
CodeBlock: Group = BraceGroupToken => match <> {
    Token::Group(_, group) => group,
    _ => unreachable!()
};

Node: Node = {
    Element => Node::Element(<>),
    TextNode => Node::Text(<>),
    CodeBlock => Node::Block(<>),
};

// Entry point used by rsx!: one node plus an optional `: Type` annotation.
pub NodeWithType: (Node, Option<Vec<Token>>) = {
    Node => (<>, None),
    <Node> ":" <TypeSpec> => {
        let (node, spec) = (<>);
        (node, Some(spec))
    },
};
// The declare macro

// A `::`-separated type path, kept as raw tokens.
TypePath: Vec<Token> = {
    IdentToken => vec![<>],
    TypePath ":" ":" IdentToken => {
        let (mut path, c1, c2, last) = (<>);
        path.push(c1);
        path.push(c2);
        path.push(last);
        path
    }
};

// `&` with an optional lifetime, e.g. `&'a`.
Reference: Vec<Token> = "&" ("'" IdentToken)? => {
    let (amp, lifetime) = (<>);
    let mut out = vec![amp];
    if let Some((tick, ident)) = lifetime {
        out.push(tick);
        out.push(ident);
    }
    out
};

// Comma-separated generic arguments, commas included in the token list.
TypeArgs: Vec<Token> = {
    TypeSpec,
    TypeArgs "," TypeSpec => {
        let (mut args, comma, last) = (<>);
        args.push(comma);
        args.extend(last);
        args
    }
};

// `<...>` generic argument list, angle brackets included.
TypeArgList: Vec<Token> = "<" TypeArgs ">" => {
    let (left, mut args, right) = (<>);
    args.insert(0, left);
    args.push(right);
    args
};

// `-> Type` return annotation.
FnReturnType: Vec<Token> = "-" ">" TypeSpec => {
    let (dash, right, spec) = (<>);
    let mut out = vec![dash, right];
    out.extend(spec);
    out
};

// `(args)` with an optional `-> Type`.
FnArgList: Vec<Token> = ParenGroupToken FnReturnType? => {
    let (args, rt) = (<>);
    let mut out = vec![args];
    if let Some(rt) = rt {
        out.extend(rt);
    }
    out
};

TypeArgSpec = {
    TypeArgList,
    FnArgList,
};

// A full type: optional reference, path, optional generic/fn arguments.
TypeSpec: Vec<Token> = Reference? TypePath TypeArgSpec? => {
    let (reference, path, args) = (<>);
    let mut out = Vec::new();
    if let Some(reference) = reference {
        out.extend(reference);
    }
    out.extend(path);
    if let Some(args) = args {
        out.extend(args);
    }
    out
};

// `name: Type` declarations, comma separated, optional trailing comma.
TypeDecl: (Ident, Vec<Token>) = <HtmlIdent> ":" <TypeSpec>;

TypeDecls: Vec<(Ident, Vec<Token>)> = {
    TypeDecl => vec![<>],
    <decls:TypeDecls> "," <decl:TypeDecl> => {
        let mut decls = decls;
        decls.push(decl);
        decls
    },
};

Attributes = "{" <TypeDecls> ","? "}";

TypePathList = "[" <Separated<TypePath, ",">> "]";

IdentList = "[" <Separated<Ident, ",">> "]";

Groups = "in" <TypePathList>;

Children: (Option<Vec<Token>>) = "with" <opt:TypePath?> => {
    opt
};
// Maps grammar terminal names onto the lexer's Token variants.
extern {
    type Location = usize;
    type Error = HtmlParseError;

    enum lexer::Token {
        "<" => Token::Punct('<', _),
        ">" => Token::Punct('>', _),
        "/" => Token::Punct('/', _),
        "=" => Token::Punct('=', _),
        "-" => Token::Punct('-', _),
        ":" => Token::Punct(':', _),
        "." => Token::Punct('.', _),
        "," => Token::Punct(',', _),
        "&" => Token::Punct('&', _),
        "'" => Token::Punct('\'', _),
        ";" => Token::Punct(';', _),
        // Brace/bracket groups arrive unrolled (see lexer::unroll_stream), so
        // their delimiters appear as open/close marker tokens...
        "{" => Token::GroupOpen(Delimiter::Brace, _),
        "}" => Token::GroupClose(Delimiter::Brace, _),
        "[" => Token::GroupOpen(Delimiter::Bracket, _),
        "]" => Token::GroupClose(Delimiter::Bracket, _),
        "in" => Token::Keyword(lexer::Keyword::In, _),
        "with" => Token::Keyword(lexer::Keyword::With, _),
        IdentToken => Token::Ident(_),
        LiteralToken => Token::Literal(_),
        // ...while these match groups kept intact as a single token.
        ParenGroupToken => Token::Group(Delimiter::Parenthesis, _),
        BraceGroupToken => Token::Group(Delimiter::Brace, _),
        BracketGroupToken => Token::Group(Delimiter::Bracket, _),
    }
}

20
macros/src/ident.rs Normal file
View file

@ -0,0 +1,20 @@
//! Utility functions, originally written by Bodil Stokke
//! over in [typed-html](https://github.com/bodil/typed-html).
use proc_macro2::{Ident, Span, TokenStream, TokenTree};
use std::str::FromStr;
/// Creates a raw identifier (`r#name`) carrying the given span.
///
/// Panics (via `Ident::new`) if `string` is not a valid identifier.
pub fn new_raw(string: &str, span: Span) -> Ident {
    // Ident::new performs validation; we only want its panic-on-invalid side.
    let _ = Ident::new(string, span);
    let raw_form = format!("r#{}", string);
    let parsed = TokenStream::from_str(&raw_form).unwrap();
    match parsed.into_iter().next().unwrap() {
        TokenTree::Ident(mut ident) => {
            ident.set_span(span);
            ident
        }
        _ => unreachable!(),
    }
}

142
macros/src/lexer.rs Normal file
View file

@ -0,0 +1,142 @@
//! Implements the Lexer used for parsing RSX, originally
//! written by Bodil Stokke over in
//! [typed-html](https://github.com/bodil/typed-html).
use crate::error::HtmlParseError;
use proc_macro2::{Delimiter, Group, Ident, Literal, Punct, Span, TokenStream, TokenTree};
use std::iter::FromIterator;
/// Lexer item type in the shape LALRPOP expects: `(start, token, end)`.
pub type Spanned<Tok, Loc, Error> = Result<(Loc, Tok, Loc), Error>;

/// A single lexed token fed to the LALRPOP parser.
#[derive(Clone, Debug)]
pub enum Token {
    Ident(Ident),
    Literal(Literal),
    // Punct/Group carry the discriminating char/delimiter up front so the
    // grammar's `extern` block can match on it directly.
    Punct(char, Punct),
    Group(Delimiter, Group),
    // Synthetic markers emitted by `unroll_stream` when a group is unrolled.
    GroupOpen(Delimiter, Span),
    GroupClose(Delimiter, Span),
    Keyword(Keyword, Ident),
}
impl Token {
    /// The source span of the underlying token, for error reporting.
    pub fn span(&self) -> Span {
        match self {
            Token::Ident(i) => i.span(),
            Token::Literal(l) => l.span(),
            Token::Punct(_, p) => p.span(),
            Token::Group(_, g) => g.span(),
            Token::GroupOpen(_, s) | Token::GroupClose(_, s) => *s,
            Token::Keyword(_, i) => i.span(),
        }
    }

    /// True when this token is a plain identifier.
    pub fn is_ident(&self) -> bool {
        if let Token::Ident(_) = self {
            true
        } else {
            false
        }
    }
}
impl From<Token> for TokenTree {
    /// Converts back into a raw `proc_macro2` token tree.
    ///
    /// Panics on `GroupOpen`/`GroupClose`: those are synthetic markers
    /// produced by `unroll_stream(_, true)` with no single-tree equivalent.
    fn from(token: Token) -> Self {
        match token {
            Token::Ident(ident) => TokenTree::Ident(ident),
            Token::Literal(literal) => TokenTree::Literal(literal),
            Token::Punct(_, punct) => TokenTree::Punct(punct),
            Token::Group(_, group) => TokenTree::Group(group),
            Token::GroupOpen(_, _) => panic!("Can't convert a GroupOpen token to a TokenTree"),
            Token::GroupClose(_, _) => panic!("Can't convert a GroupClose token to a TokenTree"),
            Token::Keyword(_, ident) => TokenTree::Ident(ident),
        }
    }
}

impl From<Token> for TokenStream {
    fn from(token: Token) -> Self {
        TokenStream::from_iter(vec![TokenTree::from(token)])
    }
}

// Wrapper conversions: each caches the discriminating piece (char/delimiter)
// alongside the original token for cheap matching in the grammar.
impl From<Ident> for Token {
    fn from(ident: Ident) -> Self {
        Token::Ident(ident)
    }
}

impl From<Literal> for Token {
    fn from(literal: Literal) -> Self {
        Token::Literal(literal)
    }
}

impl From<Punct> for Token {
    fn from(punct: Punct) -> Self {
        Token::Punct(punct.as_char(), punct)
    }
}

impl From<Group> for Token {
    fn from(group: Group) -> Self {
        Token::Group(group.delimiter(), group)
    }
}

/// Keywords the grammar recognizes beyond plain identifiers.
#[derive(Debug, Clone)]
pub enum Keyword {
    In,
    With,
}
/// Collects lexer tokens back into a single `TokenStream`.
pub fn to_stream<I: IntoIterator<Item = Token>>(tokens: I) -> TokenStream {
    tokens.into_iter().map(TokenTree::from).collect()
}
/// Flattens a `TokenStream` into the lexer's `Token` vec.
///
/// With `deep == true`, non-parenthesis groups are split into
/// `GroupOpen`/`GroupClose` markers with their contents unrolled recursively;
/// parenthesis groups (and all groups when `deep == false`) are kept as a
/// single opaque `Token::Group`.
pub fn unroll_stream(stream: TokenStream, deep: bool) -> Vec<Token> {
    let mut vec = Vec::new();
    for tt in stream {
        match tt {
            TokenTree::Ident(ident) => vec.push(ident.into()),
            TokenTree::Literal(literal) => vec.push(literal.into()),
            TokenTree::Punct(punct) => vec.push(punct.into()),
            // This guarded arm must stay before the catch-all group arm below.
            TokenTree::Group(ref group) if deep && group.delimiter() != Delimiter::Parenthesis => {
                vec.push(Token::GroupOpen(group.delimiter(), group.span()));
                let sub = unroll_stream(group.stream(), deep);
                vec.extend(sub);
                vec.push(Token::GroupClose(group.delimiter(), group.span()));
            }
            TokenTree::Group(group) => vec.push(group.into()),
        }
    }
    vec
}
/// An iterator over a pre-lexed token slice, yielding the
/// `(start, token, end)` triples LALRPOP consumes.
pub struct Lexer<'a> {
    stream: &'a [Token],
    pos: usize,
}

impl<'a> Lexer<'a> {
    /// Wraps a token slice, starting at the first token.
    pub fn new(stream: &'a [Token]) -> Self {
        Lexer { stream, pos: 0 }
    }
}

impl<'a> Iterator for Lexer<'a> {
    type Item = Spanned<Token, usize, HtmlParseError>;

    fn next(&mut self) -> Option<Self::Item> {
        // Positions double as lalrpop "locations": start = index of this
        // token, end = index of the next one.
        let token = self.stream.get(self.pos)?.clone();
        let start = self.pos;
        self.pos += 1;
        Some(Ok((start, token, self.pos)))
    }
}

79
macros/src/lib.rs Normal file
View file

@ -0,0 +1,79 @@
#![recursion_limit = "128"]
#![cfg_attr(can_show_location_of_runtime_parse_error, feature(proc_macro_span))]
//! Implements macros used in Alchemy.
//!
//! - `rsx! {}`, which turns RSX tags into `RSX` node trees.
//! - `styles! {}`, which turns CSS stylesheet strings into `Vec<Styles>`.
//!
//! In general, you should prefer using these to constructing the above values manually.
//!
//! Much of the `rsx! {}` support is achieved by forking code originally written by Bodil Stokke
//! over in [typed-html](https://github.com/bodil/typed-html).
extern crate proc_macro;
mod error;
mod rsx;
mod ident;
mod lexer;
mod map;
mod parser;
mod span;
use proc_macro::TokenStream;
use proc_macro2::{TokenStream as TokenStream2, Literal};
use proc_macro_hack::proc_macro_hack;
use quote::quote;
use alchemy_styles::cssparser::{Parser, ParserInput, RuleListParser};
use alchemy_styles::styles_parser::{Rule, RuleParser};
/// Implements the `rsx! {}` macro, which turns RSX tags into `RSX` node trees.
#[proc_macro_hack]
pub fn rsx(input: TokenStream) -> TokenStream {
    // Lex shallowly: groups stay intact so the grammar sees them as units.
    let stream = lexer::unroll_stream(input.into(), false);
    let expanded = match rsx::expand_rsx(&stream) {
        // Both success and failure expand to a token stream; a failed
        // expansion yields compile_error! invocations.
        Ok((node, ty)) => node.into_token_stream(&ty).unwrap_or_else(|err| err),
        Err(err) => error::parse_error(&stream, &err),
    };
    expanded.into()
}
/// Implements the `styles! {}` macro, which turns CSS stylesheet strings into `Vec<Styles>`.
#[proc_macro_hack]
pub fn styles(input: TokenStream) -> TokenStream {
    // NOTE(review): stringifying the token stream and stripping *all* spaces
    // is fragile — it would also mangle any quoted value containing a space.
    // Confirm no supported style value needs embedded whitespace.
    let s = input.to_string().replace(" ", "");
    let mut input = ParserInput::new(&s);
    let mut parser = Parser::new(&mut input);
    // Parse the macro body as a CSS rule list, silently dropping any rule
    // that fails to parse (filter_map over rule.ok()).
    let parsed: Vec<Rule> = RuleListParser::new_for_stylesheet(&mut parser, RuleParser {})
        .collect::<Vec<_>>()
        .into_iter()
        .filter_map(|rule| {
            rule.ok()
        })
        .collect();
    // Build the body of the generated map: one insert per rule, keyed by the
    // rule's key string, with its styles collected into a Vec.
    let mut body = TokenStream2::new();
    for rule in parsed {
        let mut stream = TokenStream2::new();
        for style in rule.styles {
            stream.extend(quote!(#style,));
        }
        let key = Literal::string(&rule.key);
        body.extend(quote!(styles.insert(#key, vec![#stream]);))
    }
    // Expand to a StyleSheet constructed from the generated HashMap.
    quote!(alchemy::theme::StyleSheet::new({
        use alchemy::theme::styles::*;
        use alchemy::theme::color::Color;
        let mut styles = std::collections::HashMap::new();
        #body
        styles
    })).into()
}

54
macros/src/map.rs Normal file
View file

@ -0,0 +1,54 @@
//! Implements StringyMap, originally written by Bodil Stokke
//! over in [typed-html](https://github.com/bodil/typed-html).
use std::collections::BTreeMap;
/// An ordered map that keys entries by the `ToString` rendering of `K`,
/// while still remembering the original key value alongside each entry.
#[derive(Clone)]
pub struct StringyMap<K, V>(BTreeMap<String, (K, V)>);

impl<K, V> StringyMap<K, V>
where
    K: ToString,
{
    /// Creates an empty map.
    pub fn new() -> Self {
        StringyMap(BTreeMap::new())
    }

    /// Inserts `v` under `k`, returning the previous value stored under an
    /// equal (stringified) key, if any.
    pub fn insert(&mut self, k: K, v: V) -> Option<V> {
        self.0.insert(k.to_string(), (k, v)).map(|(_, old)| old)
    }

    /// Removes and returns the value whose stringified key equals `k`'s.
    pub fn remove(&mut self, k: &K) -> Option<V> {
        self.0.remove(&k.to_string()).map(|(_, old)| old)
    }

    /// Iterates `(key, value)` pairs in stringified-key order.
    pub fn iter(&self) -> impl Iterator<Item = &(K, V)> {
        self.0.values()
    }

    /// Iterates the original keys in stringified-key order.
    pub fn keys(&self) -> impl Iterator<Item = &K> {
        self.0.values().map(|(key, _)| key)
    }

    /// Number of entries in the map.
    #[allow(dead_code)]
    pub fn len(&self) -> usize {
        self.0.len()
    }
}

impl<K, V, OK, OV> From<Vec<(OK, OV)>> for StringyMap<K, V>
where
    OK: Into<K>,
    OV: Into<V>,
    K: ToString,
{
    /// Builds a map from a list of convertible pairs; later duplicates win.
    fn from(pairs: Vec<(OK, OV)>) -> Self {
        let mut map = Self::new();
        for (key, value) in pairs {
            map.insert(key.into(), value.into());
        }
        map
    }
}

6
macros/src/parser.rs Normal file
View file

@ -0,0 +1,6 @@
//! Implements parsing, originally written by Bodil Stokke
//! over in [typed-html](https://github.com/bodil/typed-html).

use lalrpop_util::lalrpop_mod;

// Pulls in the parser module the build script generates from grammar.lalrpop.
lalrpop_mod!(pub grammar);

244
macros/src/rsx.rs Normal file
View file

@ -0,0 +1,244 @@
use proc_macro2::{Delimiter, Group, Ident, Literal, Span, TokenStream, TokenTree};
use quote::{quote, quote_spanned};
use crate::error::ParseError;
use crate::ident;
use crate::lexer::{/*to_stream, */Lexer, Token};
use crate::map::StringyMap;
use crate::parser::grammar;
use std::iter::FromIterator;
/// A parsed RSX node: an element tree, a quoted text literal, or a `{ .. }`
/// code block that yields children at runtime.
#[derive(Clone)]
pub enum Node {
    Element(Element),
    Text(Literal),
    Block(Group),
}
impl Node {
    /// Expands this node as a *top level* RSX value.
    ///
    /// Elements expand via `Element::into_token_stream`, text to
    /// `alchemy::RSX::text(..)`; a bare block is rejected here because there
    /// is no surrounding `children` vec to splice it into.
    pub fn into_token_stream(self, ty: &Option<Vec<Token>>) -> Result<TokenStream, TokenStream> {
        match self {
            Node::Element(el) => el.into_token_stream(ty),
            Node::Text(text) => {
                let text = TokenTree::Literal(text);
                Ok(quote!(alchemy::RSX::text(#text.to_string())))
            }
            Node::Block(group) => {
                let span = group.span();
                let error =
                    "you cannot use a block as a top level element or a required child element";
                Err(quote_spanned! { span=>
                    compile_error! { #error }
                })
            }
        }
    }

    /// Expands this node as a child: emits `children.push(..)` statements,
    /// and for blocks, a loop pushing every item the block yields.
    fn into_child_stream(self, ty: &Option<Vec<Token>>) -> Result<TokenStream, TokenStream> {
        match self {
            Node::Element(el) => {
                let el = el.into_token_stream(ty)?;
                Ok(quote!(
                    /*element.*/children.push(#el);
                ))
            }
            tx @ Node::Text(_) => {
                let tx = tx.into_token_stream(ty)?;
                Ok(quote!(
                    /*element.*/children.push(#tx);
                ))
            }
            Node::Block(group) => {
                let group: TokenTree = group.into();
                Ok(quote!(
                    for child in #group.into_iter() {
                        /*element.*/children.push(child);
                    }
                ))
            }
        }
    }
}
/// A parsed RSX element: tag name, attribute map, and child nodes.
#[derive(Clone)]
pub struct Element {
    pub name: Ident,
    pub attributes: StringyMap<Ident, TokenTree>,
    pub children: Vec<Node>,
}
/// Moves every `on*`-prefixed attribute out of `attrs` into a new map, keyed
/// by the event name (prefix stripped) as a raw identifier.
fn extract_event_handlers(
    attrs: &mut StringyMap<Ident, TokenTree>,
) -> StringyMap<Ident, TokenTree> {
    let mut events = StringyMap::new();
    // Snapshot the keys first: we mutate `attrs` while walking them.
    let attr_keys: Vec<Ident> = attrs.keys().cloned().collect();
    for attr_key in attr_keys {
        let attr_name = attr_key.to_string();
        if attr_name.starts_with("on") {
            let event_name = &attr_name["on".len()..];
            let handler = attrs.remove(&attr_key).unwrap();
            events.insert(ident::new_raw(event_name, attr_key.span()), handler);
        }
    }
    events
}
/// Normalizes an attribute value token: bracket and parenthesis groups are
/// re-emitted with their delimiters; everything else passes through as-is.
fn process_value(value: &TokenTree) -> TokenStream {
    if let TokenTree::Group(group) = value {
        let inner = group.stream();
        match group.delimiter() {
            Delimiter::Bracket => return quote!( [ #inner ] ),
            Delimiter::Parenthesis => return quote!( ( #inner ) ),
            _ => {}
        }
    }
    std::iter::once(value.clone()).collect()
}
/// True if the literal's printed form begins with a double quote, i.e. it is
/// a string literal. (proc-macro2 offers no direct way to ask.)
fn is_string_literal(literal: &Literal) -> bool {
    // This is the worst API
    literal.to_string().starts_with('"')
}

/// Returns the identifier's name with any `r#` raw prefix removed.
#[allow(dead_code)]
fn stringify_ident(ident: &Ident) -> String {
    let name = ident.to_string();
    if name.starts_with("r#") {
        name[2..].to_string()
    } else {
        name
    }
}
impl Element {
    /// Expands this element into an `alchemy::RSX::node(..)` constructor call.
    ///
    /// `ty` is the optional output type annotation from the macro invocation
    /// (`rsx!(<div/> : Type)`); here it only gates whether event handlers are
    /// allowed. Returns `Err` with `compile_error!` tokens on failure.
    fn into_token_stream(mut self, ty: &Option<Vec<Token>>) -> Result<TokenStream, TokenStream> {
        let name = self.name;
        let name_str = name.to_string();
        let typename: TokenTree = Ident::new(&name_str, name.span()).into();
        // Split `on*` attributes out into event handlers first.
        let events = extract_event_handlers(&mut self.attributes);
        let attrs = self.attributes.iter().map(|(key, value)| {
            let name = key.to_string();
            let token = TokenTree::Ident(ident::new_raw(&name, key.span()));
            (name, token, value)
        });
        let mut attributes = TokenStream::new();
        let mut styles = TokenStream::new();
        // Default styles expression, replaced if a `styles` attribute appears.
        styles.extend(quote!(alchemy::SpacedSet::new()));
        for (attr_str, key, value) in attrs {
            match value {
                TokenTree::Literal(lit) if is_string_literal(lit) => {
                    // NOTE(review): this arm only builds an error message; the
                    // code that would use it is commented out below, so string
                    // literal attribute values are currently dropped silently.
                    // Confirm whether that is intentional.
                    let mut eprintln_msg = "ERROR: ".to_owned();
                    #[cfg(can_show_location_of_runtime_parse_error)]
                    {
                        let span = lit.span();
                        eprintln_msg += &format!(
                            "{}:{}:{}: ",
                            span.unstable()
                                .source_file()
                                .path()
                                .to_str()
                                .unwrap_or("unknown"),
                            span.unstable().start().line,
                            span.unstable().start().column
                        );
                    }
                    eprintln_msg += &format!(
                        "<{} {}={}> failed to parse attribute value: {{}}",
                        name_str, attr_str, lit,
                    );
                    #[cfg(not(can_show_location_of_runtime_parse_error))]
                    {
                        eprintln_msg += "\nERROR: rebuild with nightly to print source location";
                    }
                    //body.extend(quote!(
                    /*element.attrs.#key = Some(#lit.parse().unwrap_or_else(|err| {
                        eprintln!(#eprintln_msg, err);
                        panic!("failed to parse string literal");
                    }));*/
                    //));
                },
                value => {
                    let key = key.to_string();
                    let value = process_value(value);
                    // `styles` and `key` are special-cased and never go into
                    // the generic attribute map.
                    if key == "r#styles" {
                        styles = quote!(std::convert::Into::into(#value));
                        continue;
                    }
                    if key == "r#key" {
                        continue;
                    }
                    attributes.extend(quote!(
                        attributes.insert(#key, std::convert::Into::into(#value));
                    ));
                }
            }
        }
        // Event handlers require a declared output type; emit a spanned error
        // plus a call-site hint when it is missing.
        for (key, _value) in events.iter() {
            if ty.is_none() {
                let mut err = quote_spanned! { key.span() =>
                    compile_error! { "when using event handlers, you must declare the output type inside the rsx! macro" }
                };
                let hint = quote_spanned! { Span::call_site() =>
                    compile_error! { "for example: change rsx!(<div>...</div>) to rsx!(<div>...</div> : String)" }
                };
                err.extend(hint);
                return Err(err);
            }
            //let key = TokenTree::Ident(key.clone());
            //let value = process_value(value);
            /*body.extend(quote!(
                element.events.#key = Some(alchemy::dom::events::IntoEventHandler::into_event_handler(#value));
            ));*/
        }
        /*let mut args = TokenStream::new();
        let mut type_annotation = TokenStream::new();
        if let Some(ty) = ty {
            let type_var = to_stream(ty.clone());
            type_annotation.extend(quote!(: #typename<#type_var>));
        }*/
        // Expand children; the first failing child aborts the whole expansion
        // via the `?` on the collected Result.
        let mut children = TokenStream::new();
        children.extend(self.children.into_iter().map(|node| {
            node.into_child_stream(ty)
        }).collect::<Result<Vec<TokenStream>, TokenStream>>()?);
        let component_name = Literal::string(&typename.to_string());
        Ok(quote!(
            alchemy::RSX::node(#component_name, || Box::new(#typename::default()), alchemy::Props {
                attributes: {
                    let mut attributes = std::collections::HashMap::new();
                    #attributes
                    attributes
                },
                children: {
                    let mut children = vec![];
                    #children
                    children
                },
                key: "".into(),
                styles: #styles
            })
        ))
    }
}
// FIXME report a decent error when the macro contains multiple top level elements
/// Runs the LALRPOP grammar over the lexed tokens, yielding the root node and
/// the optional `: Type` annotation.
pub fn expand_rsx(input: &[Token]) -> Result<(Node, Option<Vec<Token>>), ParseError> {
    let lexer = Lexer::new(input);
    grammar::NodeWithTypeParser::new().parse(lexer)
}

13
macros/src/span.rs Normal file
View file

@ -0,0 +1,13 @@
//! Utility functions, originally written by Bodil Stokke
//! over in [typed-html](https://github.com/bodil/typed-html).
use proc_macro;
use proc_macro2;
/// Converts a `proc_macro::Span` into a `proc_macro2::Span`.
///
/// proc-macro2 exposes no direct constructor, so the span is smuggled through
/// a throwaway token: build a one-token `proc_macro` stream carrying `span`,
/// convert the stream, and read the span back off the converted token.
pub fn from_unstable(span: proc_macro::Span) -> proc_macro2::Span {
    let carrier = proc_macro::TokenTree::Ident(proc_macro::Ident::new("_", span));
    let stream = proc_macro::TokenStream::from(carrier);
    let stream2: proc_macro2::TokenStream = stream.into();
    stream2.into_iter().next().unwrap().span()
}