// Copyright (c) 2001-2010 Hartmut Kaiser
// Copyright (c) 2009 Tor Brede Vekterli
//
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)

#include <boost/spirit/include/lex_lexertl.hpp>
#include <boost/spirit/include/qi_parse.hpp>
#include <boost/spirit/include/qi_operator.hpp>
#include <boost/spirit/include/qi_action.hpp>
#include <boost/spirit/include/qi_char.hpp>
#include <boost/spirit/include/qi_grammar.hpp>
#include <boost/phoenix/operator.hpp>

#include <boost/core/lightweight_test.hpp>

#include <iostream>
#include <string>

namespace qi = boost::spirit::qi;
namespace lex = boost::spirit::lex;

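// Note: the token id chosen for IDANY has 0x0a as its lower 8 bits, the same
// value as '\n'. This collision appears to be deliberate: the regression test
// would fail if token ids were compared by their low byte only, since IDANY
// tokens would then be indistinguishable from newline tokens.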
enum tokenids
{
    IDANY = lex::min_token_id + 10    // Lower 8 bits is 0x0a, same as '\n'
};

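// The lexer: a named pattern "TEST" is registered and referenced as "{TEST}"
// by the 'word' token, '\n' is added as a single-character token (its token
// id defaults to the character code), and "." matches any other character
// under the explicit id IDANY.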
template <typename Lexer>
struct word_count_tokens : lex::lexer<Lexer>
{
    word_count_tokens()
    {
        this->self.add_pattern
            ("TEST", "A")
        ;
        word = "{TEST}";
        this->self.add
            (word)
            ('\n')
            (".", IDANY)
        ;
    }
    lex::token_def<std::string> word;
};

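// The grammar: consumes the token stream produced by the lexer above and
// counts words, lines, and other characters in the members w, l, and c via
// Phoenix semantic actions. qi::lit('\n') matches the token whose id equals
// the character code of '\n'; qi::token(IDANY) matches by explicit token id.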
template <typename Iterator>
struct word_count_grammar : qi::grammar<Iterator>
{
    template <typename TokenDef>
    word_count_grammar(TokenDef const& tok)
      : word_count_grammar::base_type(start)
      , c(0), w(0), l(0)
    {
        using boost::phoenix::ref;
        using qi::lit;
        using qi::token;

        start = *( tok.word      [++ref(w)]
                 | lit('\n')     [++ref(l)]
                 | token(IDANY)  [++ref(c)]
                 )
             ;
    }
    std::size_t c, w, l;
    qi::rule<Iterator> start;
};

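// The driver: tokens are defined over char const* input and carry a
// std::string attribute (boost::mpl::vector<std::string>), so tok.word
// exposes the matched text as a std::string. lex::tokenize_and_parse()
// runs the lexer and the parser over the input in a single pass.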
int main()
{
    typedef lex::lexertl::token<
        const char*, boost::mpl::vector<std::string>
    > token_type;

    typedef lex::lexertl::lexer<token_type> lexer_type;
    typedef word_count_tokens<lexer_type>::iterator_type iterator_type;
    word_count_tokens<lexer_type> word_count;          // Our lexer
    word_count_grammar<iterator_type> g (word_count);  // Our parser

    std::string str ("A\nBCDEFGHI");
    char const* first = str.c_str();
    char const* last = &first[str.size()];

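    // Expected counts for "A\nBCDEFGHI": one word (the single "A" matched
    // via the {TEST} pattern), one newline, and eight remaining characters
    // ("BCDEFGHI") counted as IDANY tokens.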
    BOOST_TEST(lex::tokenize_and_parse(first, last, word_count, g));
    BOOST_TEST(g.l == 1 && g.w == 1 && g.c == 8);

    return boost::report_errors();
}

// Source: boost/libs/spirit/test/lex/regression_word_count.cpp