// Copyright (c) 2001-2011 Hartmut Kaiser
// Copyright (c) 2010 Mathias Gaunard
//
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)

// lexers with semantic actions attached to state '*' tokens
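//
// The special state name "*" attaches token definitions to every lexer
// state. The semantic actions below use lex::_state to switch the active
// state while tokenizing, so the test verifies that actions on "*"-state
// tokens fire no matter which state they were matched in.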

#include <boost/spirit/include/lex_lexertl.hpp>

#include <boost/core/lightweight_test.hpp>
#include <boost/phoenix/operator/self.hpp>

#include <sstream>

namespace spirit = boost::spirit;
namespace lex = spirit::lex;

typedef char const* content_iterator;

struct identifier
{
    identifier(content_iterator, content_iterator)
    {
    }
};

struct string_literal
{
    string_literal(content_iterator, content_iterator)
    {
    }
};

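// The token type lists identifier and string_literal as possible attribute
// types so that the token_def<identifier> and token_def<string_literal>
// definitions below can construct those attributes from the matched
// [first, last) iterator range (hence the two-iterator constructors above).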
typedef lex::lexertl::token<
    content_iterator, boost::mpl::vector<identifier, string_literal>
> token_type;

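// The lexer defines two rule sets:
//   - state "ST" recognizes single-quoted string literals and switches back
//     to the default "INITIAL" state;
//   - state "*" (added to every state) recognizes identifiers and a
//     catch-all "." pattern, both of which switch the lexer into state "ST".
// It is built on lexertl::actor_lexer, which invokes the semantic actions
// while iterating over the token stream.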
struct lexer
  : lex::lexer<lex::lexertl::actor_lexer<token_type> >
{
    lexer()
      : id("[a-zA-Z0-9]+", 1)
      , st("'[^'\\n]*'", 2)
    {
        self("ST") =
            st [ lex::_state = "INITIAL" ]
        ;

        self("*") =
            id [ lex::_state = "ST" ]
          | lex::token_def<>(".", 3) [ lex::_state = "ST" ]
        ;
    }

    lex::token_def<identifier> id;
    lex::token_def<string_literal> st;
};

typedef lexer::iterator_type token_iterator;

int main()
{
    std::string const s = "foo 'bar'";

    content_iterator begin = s.data();
    content_iterator end = s.data() + s.size();

    lexer l;
    token_iterator begin2 = l.begin(begin, end, "ST");   // start in state "ST"
    token_iterator end2 = l.end();

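    // Tokenizing "foo 'bar'" starting in state "ST" is expected to yield:
    //   1 - identifier "foo"        (matched via the "*" rules, switches to "ST")
    //   3 - catch-all "." (space)   (stays in "ST")
    //   2 - string literal "'bar'"  (matched in "ST", switches back to "INITIAL")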
    std::size_t test_data[] = { 1, 3, 2 };
    std::size_t const test_data_size = sizeof(test_data)/sizeof(test_data[0]);

    token_iterator it = begin2;
    std::size_t i = 0;
    for (/**/; it != end2 && i < test_data_size; ++it, ++i)
    {
        BOOST_TEST(it->id() == test_data[i]);
    }
    BOOST_TEST(it == end2);
    BOOST_TEST(i == test_data_size);

    return boost::report_errors();
}

// Source: boost/libs/spirit/test/lex/state_any_token_semact.cpp