diff options
author | Tavian Barnes <tavianator@gmail.com> | 2009-10-28 19:10:53 -0400 |
---|---|---|
committer | Tavian Barnes <tavianator@gmail.com> | 2009-10-28 19:10:53 -0400 |
commit | 9fbaf333e6b4a0d1ffdf588319ecf1b5ae0ac4d8 (patch) | |
tree | 5470ef98049e76c7233c0fa97bb6745d3b885b3a | |
parent | 8ce6029a6a14c0ef003c88cb9fb29e522c022f9c (diff) | |
download | dimension-9fbaf333e6b4a0d1ffdf588319ecf1b5ae0ac4d8.tar.xz |
Make '#include' work.
-rw-r--r-- | dimension/tokenize.c | 33 | ||||
-rwxr-xr-x | tests/dimension/tokenizer.sh | 2 |
2 files changed, 34 insertions, 1 deletion
diff --git a/dimension/tokenize.c b/dimension/tokenize.c
index 97b26be..72418cc 100644
--- a/dimension/tokenize.c
+++ b/dimension/tokenize.c
@@ -362,6 +362,39 @@ dmnsn_tokenize(FILE *file)
     if (dmnsn_tokenize_directive(map, size, &token,
                                  &next, &line, &col) == 0) {
       if (token.type == DMNSN_INCLUDE) {
+        /* Skip whitespace */
+        while (next - map < size && isspace(*next) && *next != '\n') {
+          ++next;
+        }
+
+        if (dmnsn_tokenize_string(map, size, &token,
+                                  &next, &line, &col) != 0) {
+          fprintf(stderr,
+                  "Expected string after #include on line %u, column %u.\n",
+                  line, col);
+          goto bailout;
+        }
+
+        FILE *included = fopen(token.value, "r");
+        if (!included) {
+          fprintf(stderr, "Couldn't include \"%s\" on line %u, column %u.\n",
+                  token.value, line, col);
+          goto bailout;
+        }
+
+        dmnsn_array *included_tokens = dmnsn_tokenize(included);
+        if (!included_tokens) {
+          fprintf(stderr, "Error tokenizing \"%s\"\n", token.value);
+          goto bailout;
+        }
+
+        unsigned int i;
+        for (i = 0; i < dmnsn_array_size(included_tokens); ++i) {
+          dmnsn_array_push(tokens, dmnsn_array_at(included_tokens, i));
+        }
+
+        dmnsn_delete_array(included_tokens);
+        continue;
       }
     } else {
       fprintf(stderr, "Invalid directive on line %u, column %u.\n",
diff --git a/tests/dimension/tokenizer.sh b/tests/dimension/tokenizer.sh
index f1f2b6b..762cced 100755
--- a/tests/dimension/tokenizer.sh
+++ b/tests/dimension/tokenizer.sh
@@ -59,7 +59,7 @@ if [ "$labels" != "$labels_exp" ]; then
 fi
 
 directives=$(${top_builddir}/dimension/dimension --tokenize ${srcdir}/directives.pov)
-directives_exp='(#include (string "punctuation.pov") #declare (identifier "x"))';
+directives_exp='({ \( [ < + - * / , > ] \) } #declare (identifier "x"))';
 
 if [ "$directives" != "$directives_exp" ]; then
   echo "directives.pov tokenized as \"$directives\"" >&2