Realized I already did most of the DataTree work in another file.

Weird, to be frank.  It was a lot of work.  Can't believe I don't
even remember doing it before.  Oh well.

In any case, I've improved the 'old' one quite a bit.  It should
be more robust now, and will provide errors that may actually be
useful to people when a file fails to parse.
This commit is contained in:
Nathan Vegdahl 2016-03-06 22:47:41 -08:00
parent 62389d42ae
commit 6268a61770
4 changed files with 260 additions and 518 deletions

View File

@ -1,336 +0,0 @@
/// A node in a parsed DataTree document.
///
/// NOTE(review): this is the older, owning (String-based) representation;
/// per the commit message a borrowed (&str-based) DataTree exists
/// elsewhere — confirm which one is canonical before extending this.
pub enum Node {
// An internal node: a type name, an optional identifier, and child nodes.
Internal {
type_name: String,
ident: Option<String>,
children: Vec<Node>,
},
// A leaf node: a type name plus its raw, unparsed contents.
Leaf {
type_name: String,
contents: String,
},
}
impl Node {
    /// Parses `text` into a tree, collecting every top-level node as a
    /// child of a synthetic "ROOT" internal node.
    fn from_string(text: &str) -> Node {
        let mut tokens = token_iter(text);
        let mut children: Vec<Node> = Vec::new();
        loop {
            match parse_node(&mut tokens) {
                Some(child) => children.push(child),
                None => break,
            }
        }
        Node::Internal {
            type_name: String::from("ROOT"),
            ident: None,
            children: children,
        }
    }
}
// Parses a single node from the token stream.
//
// NOTE(review): this is an unfinished stub (ends in `unimplemented!()`).
// As written it never returns None — a missing/invalid type name (or end
// of input) panics instead, so the caller's `while let Some(..)` loop can
// only terminate by panicking.  TODO confirm intended error strategy.
fn parse_node(ti: &mut TokenIter) -> Option<Node> {
// Every node starts with a type name token.
let type_name = if let Some(Token::TypeName(token)) = ti.next() {
token
} else {
panic!("Parse error")
};
// Optional identifier after the type name.
// NOTE(review): this `match` consumes the next token even when it is
// NOT an Ident (e.g. an OpenInner brace would be swallowed here) —
// verify before completing the implementation below.
let ident = match ti.next() {
Some(Token::Ident(token)) => Some(token),
_ => None,
};
// TODO
unimplemented!()
}
/// Creates a tokenizer over `text`, positioned at byte offset zero and
/// in normal (non-leaf-contents) tokenizing mode.
fn token_iter<'a>(text: &'a str) -> TokenIter<'a> {
    TokenIter {
        after_open_leaf: false,
        bytes_consumed: 0,
        text: text,
    }
}
// ================================================================
/// Tokens contain their starting byte offset in the original source
/// text. Some variants also contain a string slice of the relevant
/// text.
#[derive(Debug, PartialEq, Eq)]
enum Token<'a> {
    TypeName((usize, &'a str)),
    Ident((usize, &'a str)),
    OpenInner(usize),
    CloseInner(usize),
    OpenLeaf(usize),
    CloseLeaf(usize),
    LeafContents((usize, &'a str)),
    Unknown(usize),
}

/// A streaming tokenizer over DataTree source text.
struct TokenIter<'a> {
    // The not-yet-consumed tail of the source text.
    text: &'a str,
    // Byte offset of `text` within the original source; added to local
    // indices so tokens report absolute offsets.
    bytes_consumed: usize,
    // Set after an OpenLeaf ('[') token: the next call must treat the
    // input as raw leaf contents instead of normal tokens.
    after_open_leaf: bool,
}

impl<'a> Iterator for TokenIter<'a> {
    type Item = Token<'a>;

    /// Produces the next token, or None once the text is exhausted.
    fn next(&mut self) -> Option<Token<'a>> {
        let mut token = None;
        let mut iter = self.text.char_indices().peekable();
        if !self.after_open_leaf {
            // Skip newlines, whitespace, and comments.
            loop {
                let mut skipped = false;
                while let Some(&(_, c)) = iter.peek() {
                    if is_ws_char(c) || is_nl_char(c) {
                        iter.next();
                        skipped = true;
                    } else {
                        break;
                    }
                }
                if let Some(&(_, c)) = iter.peek() {
                    if is_comment_char(c) {
                        iter.next();
                        skipped = true;
                        // Comments run to the end of the line.
                        while let Some(&(_, c)) = iter.peek() {
                            if !is_nl_char(c) {
                                iter.next();
                            } else {
                                break;
                            }
                        }
                        iter.next();
                    }
                }
                if !skipped {
                    break;
                }
            }
            // Parse the meat of the token.
            if let Some(&(i, c)) = iter.peek() {
                // TypeName
                if is_ident_char(c) {
                    iter.next();
                    let i1 = i;
                    let i2 = {
                        // BUG FIX: default to the end of the text so a
                        // type name that runs to EOF yields its full
                        // text instead of an empty slice.
                        let mut i2 = self.text.len();
                        while let Some(&(i, c)) = iter.peek() {
                            if is_ident_char(c) {
                                iter.next();
                            } else {
                                i2 = i;
                                break;
                            }
                        }
                        i2
                    };
                    token = Some(Token::TypeName((self.bytes_consumed + i1, &self.text[i1..i2])));
                }
                // Ident
                else if c == '$' {
                    iter.next();
                    let i1 = i;
                    let i2 = {
                        // BUG FIX: same EOF handling as TypeName above.
                        let mut i2 = self.text.len();
                        let mut escaped = false;
                        while let Some(&(i, c)) = iter.peek() {
                            if escaped {
                                escaped = false;
                            } else if c == '\\' {
                                escaped = true;
                            } else if !is_ident_char(c) {
                                i2 = i;
                                break;
                            }
                            iter.next();
                        }
                        i2
                    };
                    token = Some(Token::Ident((self.bytes_consumed + i1, &self.text[i1..i2])));
                }
                // Structural characters
                else if is_reserved_char(c) {
                    iter.next();
                    match c {
                        '{' => {
                            token = Some(Token::OpenInner(self.bytes_consumed + i));
                        }
                        '}' => {
                            token = Some(Token::CloseInner(self.bytes_consumed + i));
                        }
                        '[' => {
                            // Leaf contents follow; tokenize them
                            // specially on the next call.
                            self.after_open_leaf = true;
                            token = Some(Token::OpenLeaf(self.bytes_consumed + i));
                        }
                        ']' => {
                            token = Some(Token::CloseLeaf(self.bytes_consumed + i));
                        }
                        _ => {
                            token = Some(Token::Unknown(self.bytes_consumed + i));
                        }
                    }
                }
            }
        }
        // Leaf contents: everything up to the first unescaped ']'.
        else if let Some(&(i, _)) = iter.peek() {
            self.after_open_leaf = false;
            let i1 = i;
            let i2 = {
                // BUG FIX: default to the end of the text so unclosed
                // leaf contents yield the remaining text instead of an
                // empty slice.
                let mut i2 = self.text.len();
                let mut escaped = false;
                while let Some(&(i, c)) = iter.peek() {
                    if escaped {
                        escaped = false;
                    } else if c == '\\' {
                        escaped = true;
                    } else if c == ']' {
                        i2 = i;
                        break;
                    }
                    iter.next();
                }
                i2
            };
            token = Some(Token::LeafContents((self.bytes_consumed + i1, &self.text[i1..i2])));
        }
        // Finish up: advance past everything consumed above.
        match iter.peek() {
            Some(&(i, _)) => {
                self.bytes_consumed += i;
                self.text = &self.text[i..];
            }
            None => {
                // BUG FIX (consistency): keep bytes_consumed in sync
                // even when the text is fully consumed.
                self.bytes_consumed += self.text.len();
                self.text = "";
            }
        }
        return token;
    }
}

// ================================================================

/// Returns whether the given unicode character is whitespace or not.
fn is_ws_char(c: char) -> bool {
    match c {
        ' ' | '\t' => true,
        _ => false,
    }
}

/// Returns whether the given utf character is a newline or not.
fn is_nl_char(c: char) -> bool {
    match c {
        '\n' | '\r' => true,
        _ => false,
    }
}

/// Returns whether the given utf character is a comment starter or not.
fn is_comment_char(c: char) -> bool {
    c == '#'
}

/// Returns whether the given utf character is a reserved character or not.
fn is_reserved_char(c: char) -> bool {
    match c {
        '{' | '}' | '[' | ']' | '\\' | '$' => true,
        _ => false,
    }
}

/// Returns whether the given utf character is a legal identifier character or not.
fn is_ident_char(c: char) -> bool {
    // Anything that isn't whitespace, reserved, or a comment starter.
    !is_ws_char(c) && !is_nl_char(c) && !is_reserved_char(c) && !is_comment_char(c)
}
// ================================================================
// Unit tests for the tokenizer.
//
// NOTE(review): the expected byte offsets below depend on the exact
// leading whitespace inside the raw string literals, which may have
// been altered by display formatting — verify against the real file.
#[cfg(test)]
mod tests {
use super::{token_iter, Token};
// Comments and whitespace are skipped; each token carries its
// absolute byte offset into the source string.
#[test]
fn token_iter_1() {
let s = r#"
# This is a comment and should be skipped
MyThing $ident { # This is another comment
MyProp [Some content]
}
"#;
let mut ti = token_iter(s);
assert_eq!(ti.next(), Some(Token::TypeName((67, "MyThing"))));
assert_eq!(ti.next(), Some(Token::Ident((75, "$ident"))));
assert_eq!(ti.next(), Some(Token::OpenInner(82)));
assert_eq!(ti.next(), Some(Token::TypeName((126, "MyProp"))));
assert_eq!(ti.next(), Some(Token::OpenLeaf(133)));
assert_eq!(ti.next(), Some(Token::LeafContents((134, "Some content"))));
assert_eq!(ti.next(), Some(Token::CloseLeaf(146)));
assert_eq!(ti.next(), Some(Token::CloseInner(160)));
assert_eq!(ti.next(), None);
}
// Escaped ']' (and other escaped characters) must not terminate
// leaf contents early.
#[test]
fn token_iter_2() {
let s = r#"MyProp [Some content\] with \escaped \\characters]"#;
let mut ti = token_iter(s);
assert_eq!(ti.next(), Some(Token::TypeName((0, "MyProp"))));
assert_eq!(ti.next(), Some(Token::OpenLeaf(7)));
assert_eq!(ti.next(),
Some(Token::LeafContents((8, r#"Some content\] with \escaped \\characters"#))));
assert_eq!(ti.next(), Some(Token::CloseLeaf(49)));
assert_eq!(ti.next(), None);
}
// Escapes are also allowed inside identifiers, including escaped
// whitespace and reserved characters.
#[test]
fn token_iter_3() {
let s = r#"MyThing $\ an\ ident\$\ with\\\{\[\ \#escaped\ content {}"#;
let mut ti = token_iter(s);
assert_eq!(ti.next(), Some(Token::TypeName((0, "MyThing"))));
assert_eq!(ti.next(),
Some(Token::Ident((8, r#"$\ an\ ident\$\ with\\\{\[\ \#escaped\ content"#))));
assert_eq!(ti.next(), Some(Token::OpenInner(55)));
assert_eq!(ti.next(), Some(Token::CloseInner(56)));
assert_eq!(ti.next(), None);
}
}

View File

@ -9,7 +9,6 @@ mod ray;
mod bbox; mod bbox;
mod camera; mod camera;
mod parse; mod parse;
mod datatree;
mod renderer; mod renderer;
mod image; mod image;
mod triangle; mod triangle;

View File

@ -1,161 +1,200 @@
#![allow(dead_code)] #![allow(dead_code)]
use std::result; use std::result::Result;
use std::cmp::Eq; use std::cmp::Eq;
#[derive(Debug)] #[derive(Debug, Eq, PartialEq)]
pub enum DataTree<'a> { pub enum DataTree<'a> {
Internal { Internal {
type_: &'a str, type_name: &'a str,
name: Option<&'a str>, ident: Option<&'a str>,
children: Vec<DataTree<'a>>, children: Vec<DataTree<'a>>,
}, },
Leaf { Leaf {
type_: &'a str, type_name: &'a str,
contents: &'a str, contents: &'a str,
}, },
} }
impl<'a> DataTree<'a> { impl<'a> DataTree<'a> {
pub fn from_str(source_text: &'a str) -> Option<Vec<DataTree<'a>>> { pub fn from_str(source_text: &'a str) -> Result<DataTree<'a>, ParseError> {
let mut items = Vec::new(); let mut items = Vec::new();
let mut remaining_text = source_text; let mut remaining_text = (0, source_text);
while let Ok((item, text)) = parse(remaining_text) { while let Some((item, text)) = try!(parse_node(remaining_text)) {
remaining_text = text; remaining_text = text;
items.push(item); items.push(item);
} }
remaining_text = skip_ws_and_comments(remaining_text); remaining_text = skip_ws_and_comments(remaining_text);
if remaining_text.len() > 0 { if remaining_text.1.len() == 0 {
return None; return Ok(DataTree::Internal {
type_name: "ROOT",
ident: None,
children: items,
});
} else { } else {
return Some(items); // If the whole text wasn't parsed, something went wrong.
return Err(ParseError::Other((0, "Failed to parse the entire string.")));
}
}
// For unit tests
fn internal_data(&'a self) -> (&'a str, Option<&'a str>, &'a Vec<DataTree<'a>>) {
if let DataTree::Internal { type_name, ident, ref children } = *self {
(type_name, ident, children)
} else {
panic!("Expected DataTree::Internal, found DataTree::Leaf")
}
}
fn leaf_data(&'a self) -> (&'a str, &'a str) {
if let DataTree::Leaf { type_name, contents } = *self {
(type_name, contents)
} else {
panic!("Expected DataTree::Leaf, found DataTree::Internal")
} }
} }
} }
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub enum ParseError {
MissingOpener(usize),
MissingOpenInternal(usize),
MissingCloseInternal(usize),
MissingOpenLeaf(usize),
MissingCloseLeaf(usize),
MissingTypeName(usize),
UnexpectedIdent(usize),
UnknownToken(usize),
Other((usize, &'static str)),
}
// ================================================================
#[derive(Debug, PartialEq, Eq)] #[derive(Debug, PartialEq, Eq)]
pub enum Token<'a> { enum Token<'a> {
OpenInner, OpenInner,
CloseInner, CloseInner,
OpenLeaf, OpenLeaf,
CloseLeaf, CloseLeaf,
Type(&'a str), TypeName(&'a str),
Name(&'a str), Ident(&'a str),
End, End,
Unknown, Unknown,
} }
type ParseResult<'a> = result::Result<(DataTree<'a>, &'a str), ()>; type ParseResult<'a> = Result<Option<(DataTree<'a>, (usize, &'a str))>, ParseError>;
fn parse_node<'a>(source_text: (usize, &'a str)) -> ParseResult<'a> {
fn parse<'a>(source_text: &'a str) -> ParseResult<'a> {
let (token, text1) = next_token(source_text); let (token, text1) = next_token(source_text);
if let Token::TypeName(type_name) = token {
if let Token::Type(t) = token {
match next_token(text1) { match next_token(text1) {
// Inner with name // Internal with name
(Token::Name(n), text2) => { (Token::Ident(n), text2) => {
if let (Token::OpenInner, text3) = next_token(text2) { if let (Token::OpenInner, text3) = next_token(text2) {
let mut children = Vec::new(); let mut children = Vec::new();
let mut text_remaining = text3; let mut text_remaining = text3;
while let Ok((node, text4)) = parse(text_remaining) { while let Some((node, text4)) = try!(parse_node(text_remaining)) {
text_remaining = text4; text_remaining = text4;
children.push(node); children.push(node);
} }
if let (Token::CloseInner, text4) = next_token(text_remaining) { if let (Token::CloseInner, text4) = next_token(text_remaining) {
return Ok((DataTree::Internal { return Ok(Some((DataTree::Internal {
type_: t, type_name: type_name,
name: Some(n), ident: Some(n),
children: children, children: children,
}, },
text4)); text4)));
} else { } else {
return Err(()); return Err(ParseError::MissingCloseInternal(text_remaining.0));
} }
} else { } else {
return Err(()); return Err(ParseError::MissingOpenInternal(text2.0));
} }
} }
// Inner without name // Internal without name
(Token::OpenInner, text2) => { (Token::OpenInner, text2) => {
let mut children = Vec::new(); let mut children = Vec::new();
let mut text_remaining = text2; let mut text_remaining = text2;
while let Ok((node, text3)) = parse(text_remaining) { while let Some((node, text3)) = try!(parse_node(text_remaining)) {
text_remaining = text3; text_remaining = text3;
children.push(node); children.push(node);
} }
if let (Token::CloseInner, text3) = next_token(text_remaining) { if let (Token::CloseInner, text3) = next_token(text_remaining) {
return Ok((DataTree::Internal { return Ok(Some((DataTree::Internal {
type_: t, type_name: type_name,
name: None, ident: None,
children: children, children: children,
}, },
text3)); text3)));
} else { } else {
return Err(()); return Err(ParseError::MissingCloseInternal(text_remaining.0));
} }
} }
// Leaf // Leaf
(Token::OpenLeaf, text2) => { (Token::OpenLeaf, text2) => {
if let Ok((lc, text3)) = parse_leaf_content(text2) { let (contents, text3) = parse_leaf_content(text2);
if let (Token::CloseLeaf, text4) = next_token(text3) { if let (Token::CloseLeaf, text4) = next_token(text3) {
return Ok((DataTree::Leaf { return Ok(Some((DataTree::Leaf {
type_: t, type_name: type_name,
contents: lc, contents: contents,
}, },
text4)); text4)));
} else {
return Err(());
}
} else { } else {
return Err(()); return Err(ParseError::MissingCloseLeaf(text3.0));
} }
} }
// Other // Other
_ => { _ => {
return Err(()); return Err(ParseError::MissingOpener(text1.0));
} }
} }
} else { } else {
return Err(()); return Ok(None);
} }
} }
fn parse_leaf_content<'a>(source_text: &'a str) -> result::Result<(&'a str, &'a str), ()> { fn parse_leaf_content<'a>(source_text: (usize, &'a str)) -> (&'a str, (usize, &'a str)) {
let mut escape = false; let mut si = 1;
let mut escaped = false;
for (i, c) in source_text.char_indices() { let mut reached_end = true;
if escape { for (i, c) in source_text.1.char_indices() {
escape = false; si = i;
continue; if escaped {
} escaped = false;
if c == ']' {
return Ok((&source_text[0..i], &source_text[i..]));
} else if c == '\\' { } else if c == '\\' {
escape = true; escaped = true;
} else if c == ']' {
reached_end = false;
break;
} }
} }
return Err(()); if reached_end {
si = source_text.1.len();
}
return (&source_text.1[0..si],
(source_text.0 + si, &source_text.1[si..]));
} }
pub fn next_token<'a>(source_text: &'a str) -> (Token<'a>, &'a str) { fn next_token<'a>(source_text: (usize, &'a str)) -> (Token<'a>, (usize, &'a str)) {
let text1 = skip_ws_and_comments(source_text); let text1 = skip_ws_and_comments(source_text);
if let Some(c) = text1.chars().nth(0) { if let Some(c) = text1.1.chars().nth(0) {
let text2 = &text1[c.len_utf8()..]; let text2 = (text1.0 + c.len_utf8(), &text1.1[c.len_utf8()..]);
match c { match c {
'{' => { '{' => {
return (Token::OpenInner, text2); return (Token::OpenInner, text2);
@ -175,46 +214,48 @@ pub fn next_token<'a>(source_text: &'a str) -> (Token<'a>, &'a str) {
'$' => { '$' => {
// Parse name // Parse name
let mut si = 0; let mut si = 1;
let mut escape = false; let mut escaped = false;
let mut broke = false; let mut reached_end = true;
for (i, c) in text1.1.char_indices().skip(1) {
for (i, c) in text2.char_indices() { si = i;
if c == '\\' { if escaped {
escape = true; escaped = false;
} else if (is_reserved_char(c) || is_ws(c)) && !escape { } else if c == '\\' {
si = i; escaped = true;
broke = true; } else if !is_ident_char(c) {
reached_end = false;
break; break;
} else {
escape = false;
} }
} }
if broke { if reached_end {
return (Token::Name(&text1[0..si + 1]), &text1[si + 1..]); si = text1.1.len();
} else {
return (Token::Name(text1), "");
} }
return (Token::Ident(&text1.1[0..si]),
(text1.0 + si, &text1.1[si..]));
} }
_ => { _ => {
// Parse type if is_ident_char(c) {
let mut si = 0; // Parse type
let mut broke = false; let mut si = 0;
let mut reached_end = true;
for (i, c) in text1.char_indices() { for (i, c) in text1.1.char_indices() {
if (is_reserved_char(c) || is_ws(c)) && c != '\\' {
si = i; si = i;
broke = true; if !is_ident_char(c) {
break; reached_end = false;
break;
}
} }
}
if broke { if reached_end {
return (Token::Type(&text1[0..si]), &text1[si..]); si = text1.1.len();
} else { }
return (Token::Type(text1), "");
return (Token::TypeName(&text1.1[0..si]),
(text1.0 + si, &text1.1[si..]));
} }
} }
@ -233,153 +274,192 @@ fn is_ws(c: char) -> bool {
} }
} }
fn is_reserved_char(c: char) -> bool { fn is_nl(c: char) -> bool {
match c { match c {
'{' | '}' | '[' | ']' | '$' | '\\' => true, '\n' | '\r' => true,
_ => false, _ => false,
} }
} }
fn skip_ws<'a>(text: &'a str) -> Option<&'a str> { fn is_reserved_char(c: char) -> bool {
match c {
'{' | '}' | '[' | ']' | '$' | '#' | '\\' => true,
_ => false,
}
}
fn is_ident_char(c: char) -> bool {
// Anything that isn't whitespace or a reserved character
!is_ws(c) && !is_reserved_char(c)
}
fn skip_ws<'a>(text: &'a str) -> &'a str {
let mut si = 0;
let mut reached_end = true;
for (i, c) in text.char_indices() { for (i, c) in text.char_indices() {
si = i;
if !is_ws(c) { if !is_ws(c) {
if i > 0 { reached_end = false;
return Some(&text[i..]);
} else {
return None;
}
}
}
if text.len() > 0 {
return Some("");
} else {
return None;
}
}
fn skip_comment<'a>(text: &'a str) -> Option<&'a str> {
let mut tci = text.char_indices();
if let Some((_, '#')) = tci.next() {
for (i, c) in tci {
match c {
'\n' | '\r' => {
return Some(&text[i..]);
}
_ => {}
}
}
return Some("");
} else {
return None;
}
}
fn skip_ws_and_comments<'a>(text: &'a str) -> &'a str {
let mut remaining_text = text;
loop {
let mut ws = 0;
let mut comment = 0;
while let Some(t) = skip_ws(remaining_text) {
remaining_text = t;
ws += 1;
}
while let Some(t) = skip_comment(remaining_text) {
remaining_text = t;
comment += 1;
}
if ws == 0 && comment == 0 {
break; break;
} }
} }
return remaining_text; if reached_end {
si = text.len();
}
return &text[si..];
}
fn skip_comment<'a>(text: &'a str) -> &'a str {
let mut si = 0;
if Some('#') == text.chars().nth(0) {
let mut reached_end = true;
for (i, c) in text.char_indices() {
si = i;
if is_nl(c) {
reached_end = false;
break;
}
}
if reached_end {
si = text.len();
}
}
return &text[si..];
}
fn skip_ws_and_comments<'a>(text: (usize, &'a str)) -> (usize, &'a str) {
let mut remaining_text = text.1;
loop {
let tmp = skip_comment(skip_ws(remaining_text));
if tmp.len() == remaining_text.len() {
break;
} else {
remaining_text = tmp;
}
}
let offset = text.0 + text.1.len() - remaining_text.len();
return (offset, remaining_text);
} }
// ================================================================
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use super::{next_token, Token};
#[test] #[test]
fn test_tokenize_1() { fn tokenize_1() {
let input = "Thing"; let input = (0, "Thing");
assert_eq!(next_token(input), (Token::Type("Thing"), "")); assert_eq!(next_token(input), (Token::TypeName("Thing"), (5, "")));
} }
#[test] #[test]
fn test_tokenize_2() { fn tokenize_2() {
let input = " \n# gdfgdf gfdg dggdf\\sg dfgsd \n Thing"; let input = (0, " \n# gdfgdf gfdg dggdf\\sg dfgsd \n Thing");
assert_eq!(next_token(input), (Token::Type("Thing"), "")); assert_eq!(next_token(input), (Token::TypeName("Thing"), (41, "")));
} }
#[test] #[test]
fn test_tokenize_3() { fn tokenize_3() {
let input1 = " Thing { }"; let input1 = (0, " Thing { }");
let (token1, input2) = next_token(input1); let (token1, input2) = next_token(input1);
let (token2, input3) = next_token(input2); let (token2, input3) = next_token(input2);
let (token3, input4) = next_token(input3); let (token3, input4) = next_token(input3);
assert_eq!((token1, input2), (Token::Type("Thing"), " { }")); assert_eq!((token1, input2.1), (Token::TypeName("Thing"), " { }"));
assert_eq!((token2, input3), (Token::OpenInner, " }")); assert_eq!((token2, input3.1), (Token::OpenInner, " }"));
assert_eq!((token3, input4), (Token::CloseInner, "")); assert_eq!((token3, input4.1), (Token::CloseInner, ""));
} }
#[test] #[test]
fn test_tokenize_4() { fn tokenize_4() {
let input = " $hi_there "; let input = (0, " $hi_there ");
assert_eq!(next_token(input), (Token::Name("$hi_there"), " ")); assert_eq!(next_token(input), (Token::Ident("$hi_there"), (10, " ")));
} }
#[test] #[test]
fn test_tokenize_5() { fn tokenize_5() {
let input = " $hi\\ t\\#he\\[re "; let input = (0, " $hi\\ t\\#he\\[re ");
assert_eq!(next_token(input), (Token::Name("$hi\\ t\\#he\\[re"), " ")); assert_eq!(next_token(input),
(Token::Ident("$hi\\ t\\#he\\[re"), (15, " ")));
} }
#[test] #[test]
fn test_tokenize_6() { fn tokenize_6() {
let input1 = " $hi the[re"; let input1 = (0, " $hi the[re");
let (token1, input2) = next_token(input1); let (token1, input2) = next_token(input1);
let (token2, input3) = next_token(input2); let (token2, input3) = next_token(input2);
let (token3, input4) = next_token(input3); let (token3, input4) = next_token(input3);
let (token4, input5) = next_token(input4); let (token4, input5) = next_token(input4);
let (token5, input6) = next_token(input5);
assert_eq!((token1, input2), (Token::Name("$hi"), " the[re")); assert_eq!((token1, input2), (Token::Ident("$hi"), (4, " the[re")));
assert_eq!((token2, input3), (Token::Type("the"), "[re")); assert_eq!((token2, input3), (Token::TypeName("the"), (8, "[re")));
assert_eq!((token3, input4), (Token::OpenLeaf, "re")); assert_eq!((token3, input4), (Token::OpenLeaf, (9, "re")));
assert_eq!((token4, input5), (Token::Type("re"), "")); assert_eq!((token4, input5), (Token::TypeName("re"), (11, "")));
assert_eq!((token5, input6), (Token::End, (11, "")));
} }
#[test] #[test]
fn test_tokenize_7() { fn tokenize_7() {
let input1 = "Thing $yar { # A comment\n\tThing2 []\n}"; let input1 = (0, "Thing $yar { # A comment\n\tThing2 []\n}");
let (token1, input2) = next_token(input1); let (token1, input2) = next_token(input1);
let (token2, input3) = next_token(input2); let (token2, input3) = next_token(input2);
let (token3, input4) = next_token(input3); let (token3, input4) = next_token(input3);
let (token4, input5) = next_token(input4); let (token4, input5) = next_token(input4);
let (token5, input6) = next_token(input5); let (token5, input6) = next_token(input5);
let (token6, input7) = next_token(input6); let (token6, input7) = next_token(input6);
let (token7, input8) = next_token(input7);
let (token8, input9) = next_token(input8);
assert_eq!((token1, input2), assert_eq!((token1, input2),
(Token::Type("Thing"), " $yar { # A comment\n\tThing2 []\n}")); (Token::TypeName("Thing"),
(5, " $yar { # A comment\n\tThing2 []\n}")));
assert_eq!((token2, input3), assert_eq!((token2, input3),
(Token::Name("$yar"), " { # A comment\n\tThing2 []\n}")); (Token::Ident("$yar"), (10, " { # A comment\n\tThing2 []\n}")));
assert_eq!((token3, input4), assert_eq!((token3, input4),
(Token::OpenInner, " # A comment\n\tThing2 []\n}")); (Token::OpenInner, (12, " # A comment\n\tThing2 []\n}")));
assert_eq!((token4, input5), (Token::Type("Thing2"), " []\n}")); assert_eq!((token4, input5),
assert_eq!((token5, input6), (Token::OpenLeaf, "]\n}")); (Token::TypeName("Thing2"), (32, " []\n}")));
assert_eq!((token6, input7), (Token::CloseLeaf, "\n}")); assert_eq!((token5, input6), (Token::OpenLeaf, (34, "]\n}")));
assert_eq!((token6, input7), (Token::CloseLeaf, (35, "\n}")));
assert_eq!((token7, input8), (Token::CloseInner, (37, "")));
assert_eq!((token8, input9), (Token::End, (37, "")));
}
#[test]
fn parse_1() {
let input = r#"
Thing {}
"#;
let dt = DataTree::from_str(input).unwrap();
// Root
let (t, i, c) = dt.internal_data();
assert_eq!(t, "ROOT");
assert_eq!(i, None);
assert_eq!(c.len(), 1);
// First (and only) child
let (t, i, c) = c[0].internal_data();
assert_eq!(t, "Thing");
assert_eq!(i, None);
assert_eq!(c.len(), 0);
} }
} }

View File

@ -1,7 +1,6 @@
#![allow(dead_code)] #![allow(dead_code)]
use renderer::Renderer; use renderer::Renderer;
use super::DataTree; use super::DataTree;
pub fn parse_frame(tree: &DataTree) -> Renderer { pub fn parse_frame(tree: &DataTree) -> Renderer {