1 | // Copyright (c) 2001-2010 Hartmut Kaiser |
2 | // Copyright (c) 2010 Sergey "GooRoo" Olendarenko |
3 | // |
4 | // Distributed under the Boost Software License, Version 1.0. (See accompanying |
5 | // file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) |
6 | |
7 | #include <cstdlib> |
8 | #include <iostream> |
9 | #include <locale> |
10 | #include <string> |
11 | |
12 | #include <boost/spirit/include/lex_lexertl.hpp> |
13 | #include <boost/phoenix/function.hpp> |
14 | #include <boost/phoenix/operator.hpp> |
15 | |
16 | #include <boost/core/lightweight_test.hpp> |
17 | |
18 | namespace lex = boost::spirit::lex; |
19 | namespace phoenix = boost::phoenix; |
20 | |
21 | typedef std::basic_string<wchar_t> wstring_type; |
22 | |
23 | /////////////////////////////////////////////////////////////////////////////// |
// Token identifiers assigned to the token definitions below.
// Numbering starts at 1; 0 is not used as a token id here.
enum tokenids
{
    ID_IDENT = 1,   // identifier: [a-zA-Z_][a-zA-Z0-9_]*
    ID_CONSTANT,    // numeric constant: [0-9]+(\.[0-9]+)?
    ID_OPERATION,   // arithmetic operator: + - * /
    ID_BRACKET      // bracket: ( ) [ ]
};
31 | |
32 | /////////////////////////////////////////////////////////////////////////////// |
// One expected lexer result: the token id the lexer should report and the
// exact input text the token should cover.
struct test_data
{
    tokenids tokenid;    // expected token identifier
    wstring_type value;  // expected matched text
};
38 | |
39 | // alpha+x1*(2.836-x2[i]) |
40 | test_data data[] = |
41 | { |
42 | { .tokenid: ID_IDENT, .value: L"alpha" }, |
43 | { .tokenid: ID_OPERATION, .value: L"+" }, |
44 | { .tokenid: ID_IDENT, .value: L"x1" }, |
45 | { .tokenid: ID_OPERATION, .value: L"*" }, |
46 | { .tokenid: ID_BRACKET, .value: L"(" }, |
47 | { .tokenid: ID_CONSTANT, .value: L"2.836" }, |
48 | { .tokenid: ID_OPERATION, .value: L"-" }, |
49 | { .tokenid: ID_IDENT, .value: L"x2" }, |
50 | { .tokenid: ID_BRACKET, .value: L"[" }, |
51 | { .tokenid: ID_IDENT, .value: L"i" }, |
52 | { .tokenid: ID_BRACKET, .value: L"]" }, |
53 | { .tokenid: ID_BRACKET, .value: L")" } |
54 | }; |
55 | |
56 | /////////////////////////////////////////////////////////////////////////////// |
57 | struct test_impl |
58 | { |
59 | typedef void result_type; |
60 | template <typename TokenId, typename Value> |
61 | struct result { typedef void type; }; |
62 | |
63 | template <typename TokenId, typename Value> |
64 | void operator()(TokenId const& tokenid, Value const& val) const |
65 | { |
66 | BOOST_TEST(sequence_counter < sizeof(data)/sizeof(data[0])); |
67 | BOOST_TEST(data[sequence_counter].tokenid == tokenids(tokenid)); |
68 | BOOST_TEST(0 == val.which()); |
69 | |
70 | typedef boost::iterator_range<wstring_type::iterator> iterator_range; |
71 | iterator_range r = boost::get<iterator_range>(val); |
72 | BOOST_TEST(data[sequence_counter].value == |
73 | wstring_type(r.begin(), r.end())); |
74 | |
75 | ++sequence_counter; |
76 | } |
77 | |
78 | static std::size_t sequence_counter; |
79 | }; |
80 | std::size_t test_impl::sequence_counter = 0; |
81 | |
// Phoenix function wrapper so test_impl can be used as a lazy function inside
// the lexer semantic actions, e.g. test(_tokenid, _val).
phoenix::function<test_impl> const test = test_impl();
83 | |
84 | /////////////////////////////////////////////////////////////////////////////// |
85 | template <typename Lexer> |
86 | struct mega_tokens : lex::lexer<Lexer> |
87 | { |
88 | mega_tokens() |
89 | : identifier(L"[a-zA-Z_][a-zA-Z0-9_]*" , ID_IDENT) |
90 | , constant (L"[0-9]+(\\.[0-9]+)?" , ID_CONSTANT) |
91 | , operation (L"[\\+\\-\\*/]" , ID_OPERATION) |
92 | , bracket (L"[\\(\\)\\[\\]]" , ID_BRACKET) |
93 | { |
94 | using lex::_tokenid; |
95 | using lex::_val; |
96 | |
97 | this->self |
98 | = operation [ test(_tokenid, _val) ] |
99 | | identifier [ test(_tokenid, _val) ] |
100 | | constant [ test(_tokenid, _val) ] |
101 | | bracket [ test(_tokenid, _val) ] |
102 | ; |
103 | } |
104 | |
105 | lex::token_def<wstring_type, wchar_t, tokenids> identifier; |
106 | lex::token_def<double, wchar_t, tokenids> constant; |
107 | lex::token_def<wchar_t, wchar_t, tokenids> operation; |
108 | lex::token_def<wchar_t, wchar_t, tokenids> bracket; |
109 | }; |
110 | |
111 | /////////////////////////////////////////////////////////////////////////////// |
112 | int main() |
113 | { |
114 | typedef wstring_type::iterator base_iterator; |
115 | typedef lex::lexertl::token< |
116 | base_iterator, boost::mpl::vector<wchar_t, wstring_type, double> |
117 | , boost::mpl::true_, tokenids |
118 | > token_type; |
119 | typedef lex::lexertl::actor_lexer<token_type> lexer_type; |
120 | |
121 | mega_tokens<lexer_type> mega_lexer; |
122 | |
123 | wstring_type exampleStr = L"alpha+x1*(2.836-x2[i])" ; |
124 | base_iterator first = exampleStr.begin(); |
125 | |
126 | BOOST_TEST(lex::tokenize(first, exampleStr.end(), mega_lexer)); |
127 | BOOST_TEST(test_impl::sequence_counter == sizeof(data)/sizeof(data[0])); |
128 | |
129 | return boost::report_errors(); |
130 | } |
131 | |