
Merge pull request #109 from jefftrull/feature/cpp20-tokens

Introduce C++20 tokens
Jeff Trull
2020-08-16 11:55:39 -07:00
committed by GitHub
16 changed files with 6616 additions and 5752 deletions
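The net effect of the diffs below: when a lexer is constructed with C++20 support enabled, the new keywords come back as dedicated token ids instead of plain identifiers. A minimal sketch of how that surfaces through the public lexer interface, assuming the boost::wave::support_cpp2a language option that the updated tests below pass (the input text and position name are placeholders):

#include <iostream>
#include <string>
#include <boost/wave/token_ids.hpp>
#include <boost/wave/cpplexer/cpp_lex_token.hpp>
#include <boost/wave/cpplexer/cpp_lex_iterator.hpp>

int main()
{
    typedef boost::wave::cpplexer::lex_token<> token_type;
    typedef boost::wave::cpplexer::lex_iterator<token_type> lexer_type;

    std::string instr("co_await foo();");        // hypothetical input
    token_type::position_type pos("<sample>");   // hypothetical file name

    // Tokenize in C++20 mode; without support_cpp2a, co_await comes back
    // as a plain T_IDENTIFIER.
    lexer_type it(instr.begin(), instr.end(), pos, boost::wave::support_cpp2a);
    lexer_type end;

    std::cout << std::boolalpha
              << (boost::wave::token_id(*it) == boost::wave::T_CO_AWAIT)
              << std::endl;
    return 0;
}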


@@ -54,13 +54,20 @@ NonDigit = [a-zA-Z_$] | UniversalChar;
"case" { BOOST_WAVE_RET(T_CASE); }
"catch" { BOOST_WAVE_RET(T_CATCH); }
"char" { BOOST_WAVE_RET(T_CHAR); }
"char8_t" { BOOST_WAVE_RET(s->act_in_cpp2a_mode ? T_CHAR8_T : T_IDENTIFIER); }
"char16_t" { BOOST_WAVE_RET(s->act_in_cpp0x_mode ? T_CHAR16_T : T_IDENTIFIER); }
"char32_t" { BOOST_WAVE_RET(s->act_in_cpp0x_mode ? T_CHAR32_T : T_IDENTIFIER); }
"class" { BOOST_WAVE_RET(T_CLASS); }
"concept" { BOOST_WAVE_RET(s->act_in_cpp2a_mode ? T_CONCEPT : T_IDENTIFIER); }
"const" { BOOST_WAVE_RET(T_CONST); }
"consteval" { BOOST_WAVE_RET(s->act_in_cpp2a_mode ? T_CONSTEVAL : T_IDENTIFIER); }
"constexpr" { BOOST_WAVE_RET(s->act_in_cpp0x_mode ? T_CONSTEXPR : T_IDENTIFIER); }
"constinit" { BOOST_WAVE_RET(s->act_in_cpp2a_mode ? T_CONSTINIT : T_IDENTIFIER); }
"const_cast" { BOOST_WAVE_RET(T_CONSTCAST); }
"continue" { BOOST_WAVE_RET(T_CONTINUE); }
"co_await" { BOOST_WAVE_RET(s->act_in_cpp2a_mode ? T_CO_AWAIT : T_IDENTIFIER); }
"co_return" { BOOST_WAVE_RET(s->act_in_cpp2a_mode ? T_CO_RETURN : T_IDENTIFIER); }
"co_yield" { BOOST_WAVE_RET(s->act_in_cpp2a_mode ? T_CO_YIELD : T_IDENTIFIER); }
"decltype" { BOOST_WAVE_RET(s->act_in_cpp0x_mode ? T_DECLTYPE : T_IDENTIFIER); }
"default" { BOOST_WAVE_RET(T_DEFAULT); }
"delete" { BOOST_WAVE_RET(T_DELETE); }
@@ -93,6 +100,7 @@ NonDigit = [a-zA-Z_$] | UniversalChar;
"public" { BOOST_WAVE_RET(T_PUBLIC); }
"register" { BOOST_WAVE_RET(T_REGISTER); }
"reinterpret_cast" { BOOST_WAVE_RET(T_REINTERPRETCAST); }
"requires" { BOOST_WAVE_RET(s->act_in_cpp2a_mode ? T_REQUIRES : T_IDENTIFIER); }
"return" { BOOST_WAVE_RET(T_RETURN); }
"short" { BOOST_WAVE_RET(T_SHORT); }
"signed" { BOOST_WAVE_RET(T_SIGNED); }

File diff suppressed because it is too large.


@@ -153,6 +153,14 @@ lexer<IteratorT, PositionT, TokenT>::lexer(IteratorT const &first,
#else
scanner.act_in_cpp0x_mode = false;
#endif
#if BOOST_WAVE_SUPPORT_CPP2A != 0
scanner.act_in_cpp2a_mode = boost::wave::need_cpp2a(language_);
scanner.act_in_cpp0x_mode = boost::wave::need_cpp2a(language_)
|| boost::wave::need_cpp0x(language_);
#else
scanner.act_in_cpp2a_mode = false;
#endif
}
template <typename IteratorT, typename PositionT, typename TokenT>
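As the hunk above shows, enabling C++20 support also forces the C++11 flag on, so the C++11-only keywords stay recognized in C++20 mode. A small illustrative sketch of that flag derivation, assuming the need_cpp0x and need_cpp2a helpers referenced in the diff are available from boost/wave/language_support.hpp (the struct and function here are hypothetical, not part of Wave):

#include <boost/wave/language_support.hpp>

struct scanner_modes { bool cpp0x; bool cpp2a; };   // stand-in for the Scanner flags

inline scanner_modes derive_modes(boost::wave::language_support lang)
{
    scanner_modes m;
    m.cpp2a = boost::wave::need_cpp2a(lang);
    // C++20 implies the C++11 keyword set as well
    m.cpp0x = boost::wave::need_cpp0x(lang) || boost::wave::need_cpp2a(lang);
    return m;
}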


@@ -74,6 +74,7 @@ struct Scanner {
bool enable_import_keyword; /* recognize import as a keyword */
bool single_line_only; /* don't report missing eol's in C++ comments */
bool act_in_cpp0x_mode; /* lexer works in C++11 mode */
bool act_in_cpp2a_mode; /* lexer works in C++20 mode */
};
///////////////////////////////////////////////////////////////////////////////


@@ -53,13 +53,20 @@ NonDigit = [a-zA-Z_] | UniversalChar;
"case" { BOOST_WAVE_RET(T_CASE); }
"catch" { BOOST_WAVE_RET(T_CATCH); }
"char" { BOOST_WAVE_RET(T_CHAR); }
"char8_t" { BOOST_WAVE_RET(s->act_in_cpp2a_mode ? T_CHAR8_T : T_IDENTIFIER); }
"char16_t" { BOOST_WAVE_RET(s->act_in_cpp0x_mode ? T_CHAR16_T : T_IDENTIFIER); }
"char32_t" { BOOST_WAVE_RET(s->act_in_cpp0x_mode ? T_CHAR32_T : T_IDENTIFIER); }
"class" { BOOST_WAVE_RET(T_CLASS); }
"concept" { BOOST_WAVE_RET(s->act_in_cpp2a_mode ? T_CONCEPT : T_IDENTIFIER); }
"const" { BOOST_WAVE_RET(T_CONST); }
"consteval" { BOOST_WAVE_RET(s->act_in_cpp2a_mode ? T_CONSTEVAL : T_IDENTIFIER); }
"constexpr" { BOOST_WAVE_RET(s->act_in_cpp0x_mode ? T_CONSTEXPR : T_IDENTIFIER); }
"constinit" { BOOST_WAVE_RET(s->act_in_cpp2a_mode ? T_CONSTINIT : T_IDENTIFIER); }
"const_cast" { BOOST_WAVE_RET(T_CONSTCAST); }
"continue" { BOOST_WAVE_RET(T_CONTINUE); }
"co_await" { BOOST_WAVE_RET(s->act_in_cpp2a_mode ? T_CO_AWAIT : T_IDENTIFIER); }
"co_return" { BOOST_WAVE_RET(s->act_in_cpp2a_mode ? T_CO_RETURN : T_IDENTIFIER); }
"co_yield" { BOOST_WAVE_RET(s->act_in_cpp2a_mode ? T_CO_YIELD : T_IDENTIFIER); }
"decltype" { BOOST_WAVE_RET(s->act_in_cpp0x_mode ? T_DECLTYPE : T_IDENTIFIER); }
"default" { BOOST_WAVE_RET(T_DEFAULT); }
"delete" { BOOST_WAVE_RET(T_DELETE); }
@@ -92,6 +99,7 @@ NonDigit = [a-zA-Z_] | UniversalChar;
"public" { BOOST_WAVE_RET(T_PUBLIC); }
"register" { BOOST_WAVE_RET(T_REGISTER); }
"reinterpret_cast" { BOOST_WAVE_RET(T_REINTERPRETCAST); }
"requires" { BOOST_WAVE_RET(s->act_in_cpp2a_mode ? T_REQUIRES : T_IDENTIFIER); }
"return" { BOOST_WAVE_RET(T_RETURN); }
"short" { BOOST_WAVE_RET(T_SHORT); }
"signed" { BOOST_WAVE_RET(T_SIGNED); }

File diff suppressed because it is too large.


@@ -301,6 +301,17 @@ enum token_id {
T_THREADLOCAL = TOKEN_FROM_ID(431, KeywordTokenType),
T_RAWSTRINGLIT = TOKEN_FROM_ID(432, StringLiteralTokenType),
// C++20 keywords
T_CHAR8_T = TOKEN_FROM_ID(433, KeywordTokenType),
T_CONCEPT = TOKEN_FROM_ID(434, KeywordTokenType),
T_CONSTEVAL = TOKEN_FROM_ID(435, KeywordTokenType),
T_CONSTINIT = TOKEN_FROM_ID(436, KeywordTokenType),
T_CO_AWAIT = TOKEN_FROM_ID(437, KeywordTokenType),
T_CO_RETURN = TOKEN_FROM_ID(438, KeywordTokenType),
T_CO_YIELD = TOKEN_FROM_ID(439, KeywordTokenType),
T_REQUIRES = TOKEN_FROM_ID(440, KeywordTokenType),
T_LAST_TOKEN_ID,
T_LAST_TOKEN = ID_FROM_TOKEN(T_LAST_TOKEN_ID & ~PPTokenFlag),
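The new ids continue the sequence right after T_RAWSTRINGLIT, so existing code that compares or classifies token ids handles them without changes. A small sketch (the helper function is hypothetical):

#include <boost/wave/token_ids.hpp>

// True for the three coroutine keywords added above. Each new id is built
// with TOKEN_FROM_ID(n, KeywordTokenType), so it compares like any other
// keyword token id.
inline bool is_coroutine_keyword(boost::wave::token_id id)
{
    using namespace boost::wave;
    return id == T_CO_AWAIT || id == T_CO_RETURN || id == T_CO_YIELD;
}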


@@ -58,6 +58,7 @@ namespace lexer {
#define INIT_DATA_CPP_SIZE 15
#define INIT_DATA_PP_NUMBER_SIZE 2
#define INIT_DATA_CPP0X_SIZE 15
#define INIT_DATA_CPP2A_SIZE 9
///////////////////////////////////////////////////////////////////////////////
//
@@ -114,6 +115,7 @@ private:
static typename base_type::lexer_data const init_data_cpp[INIT_DATA_CPP_SIZE]; // C++ only patterns
static typename base_type::lexer_data const init_data_pp_number[INIT_DATA_PP_NUMBER_SIZE]; // pp-number only patterns
static typename base_type::lexer_data const init_data_cpp0x[INIT_DATA_CPP0X_SIZE]; // C++0X only patterns
static typename base_type::lexer_data const init_data_cpp2a[INIT_DATA_CPP2A_SIZE]; // C++2A only patterns
};
///////////////////////////////////////////////////////////////////////////////
@@ -432,7 +434,7 @@ lexer<IteratorT, PositionT>::init_data_pp_number[INIT_DATA_PP_NUMBER_SIZE] =
};
///////////////////////////////////////////////////////////////////////////////
// C++ only token definitions
// C++11 only token definitions
#define T_EXTCHARLIT token_id(T_CHARLIT|AltTokenType)
#define T_EXTSTRINGLIT token_id(T_STRINGLIT|AltTokenType)
@@ -459,10 +461,30 @@ lexer<IteratorT, PositionT>::init_data_cpp0x[INIT_DATA_CPP0X_SIZE] =
TOKEN_DATA(NOEXCEPT, "noexcept"),
TOKEN_DATA(NULLPTR, "nullptr"),
TOKEN_DATA(STATICASSERT, "static_assert"),
TOKEN_DATA(THREADLOCAL, "threadlocal"),
TOKEN_DATA(THREADLOCAL, "thread_local"),
{ token_id(0) } // this should be the last entry
};
///////////////////////////////////////////////////////////////////////////////
// C++20 only token definitions
template <typename IteratorT, typename PositionT>
typename lexer_base<IteratorT, PositionT>::lexer_data const
lexer<IteratorT, PositionT>::init_data_cpp2a[INIT_DATA_CPP2A_SIZE] =
{
TOKEN_DATA(CHAR8_T, "char8_t"),
TOKEN_DATA(CONCEPT, "concept"),
TOKEN_DATA(CONSTEVAL, "consteval"),
TOKEN_DATA(CONSTINIT, "constinit"),
TOKEN_DATA(CO_AWAIT, "co_await"),
TOKEN_DATA(CO_RETURN, "co_return"),
TOKEN_DATA(CO_YIELD, "co_yield"),
TOKEN_DATA(REQUIRES, "requires"),
{ token_id(0) } // this should be the last entry
};
///////////////////////////////////////////////////////////////////////////////
// undefine macros, required for regular expression definitions
#undef INCLUDEDEF
@@ -528,9 +550,9 @@ lexer<IteratorT, PositionT>::init_dfa(boost::wave::language_support lang)
}
}
// if in C++0x mode, add all new keywords
// if in C++0x mode, add appropriate keywords
#if BOOST_WAVE_SUPPORT_CPP0X != 0
if (boost::wave::need_cpp0x(lang)) {
if (boost::wave::need_cpp0x(lang) || boost::wave::need_cpp2a(lang)) {
for (int j = 0; 0 != init_data_cpp0x[j].tokenid; ++j) {
this->register_regex(init_data_cpp0x[j].tokenregex,
init_data_cpp0x[j].tokenid, init_data_cpp0x[j].tokencb,
@@ -539,6 +561,18 @@ lexer<IteratorT, PositionT>::init_dfa(boost::wave::language_support lang)
}
#endif
// if in C++2a mode, add those keywords
#if BOOST_WAVE_SUPPORT_CPP2A != 0
if (wave::need_cpp2a(lang)) {
for (int j = 0; 0 != init_data_cpp2a[j].tokenid; ++j) {
this->register_regex(init_data_cpp2a[j].tokenregex,
init_data_cpp2a[j].tokenid,
init_data_cpp2a[j].tokencb,
init_data_cpp2a[j].lexerstate);
}
}
#endif
for (int i = 0; 0 != init_data[i].tokenid; ++i) {
this->register_regex(init_data[i].tokenregex, init_data[i].tokenid,
init_data[i].tokencb, init_data[i].lexerstate);


@@ -57,7 +57,9 @@ namespace boost { namespace wave { namespace cpplexer { namespace lexertl
#endif
#define INIT_DATA_CPP_SIZE 15
#define INIT_DATA_PP_NUMBER_SIZE 2
#define INIT_MACRO_DATA_SIZE 27
#define INIT_DATA_CPP0X_SIZE 15
#define INIT_DATA_CPP2A_SIZE 9
#define INIT_MACRO_DATA_SIZE 28
#endif // #if BOOST_WAVE_LEXERTL_USE_STATIC_TABLES == 0
// this is just a hack to have a unique token id not otherwise used by Wave
@@ -117,6 +119,8 @@ private:
static lexer_data const init_data[INIT_DATA_SIZE]; // common patterns
static lexer_data const init_data_cpp[INIT_DATA_CPP_SIZE]; // C++ only patterns
static lexer_data const init_data_pp_number[INIT_DATA_PP_NUMBER_SIZE]; // pp-number only patterns
static lexer_data const init_data_cpp0x[INIT_DATA_CPP0X_SIZE]; // C++0X only patterns
static lexer_data const init_data_cpp2a[INIT_DATA_CPP2A_SIZE]; // C++2A only patterns
// helper for calculation of the time of last compilation
static boost::wave::util::time_conversion_helper compilation_time;
@@ -167,6 +171,7 @@ lexertl<Iterator, Position>::init_macro_data[INIT_MACRO_DATA_SIZE] =
#endif
MACRO_DATA("FLOAT_SUFFIX", "(" "[fF][lL]?" OR "[lL][fF]?" ")"),
MACRO_DATA("CHAR_SPEC", "L?"),
MACRO_DATA("EXTCHAR_SPEC", "(" "[uU]" OR "u8" ")"),
MACRO_DATA("BACKSLASH", "(" Q("\\") OR TRI(Q("/")) ")"),
MACRO_DATA("ESCAPESEQ", "{BACKSLASH}([abfnrtv?'\"]|{BACKSLASH}|x{HEXDIGIT}+|{OCTALDIGIT}{1,3})"),
MACRO_DATA("HEXQUAD", "{HEXDIGIT}{4}"),
@@ -413,6 +418,54 @@ lexertl<Iterator, Position>::init_data_pp_number[INIT_DATA_PP_NUMBER_SIZE] =
{ token_id(0) } // this should be the last entry
};
// C++11 specific token definitions
#define T_EXTCHARLIT token_id(T_CHARLIT|AltTokenType)
#define T_EXTSTRINGLIT token_id(T_STRINGLIT|AltTokenType)
#define T_EXTRAWSTRINGLIT token_id(T_RAWSTRINGLIT|AltTokenType)
template <typename Iterator, typename Position>
typename lexertl<Iterator, Position>::lexer_data const
lexertl<Iterator, Position>::init_data_cpp0x[INIT_DATA_CPP0X_SIZE] =
{
TOKEN_DATA(T_EXTCHARLIT, "{EXTCHAR_SPEC}" "'"
"(" "{ESCAPESEQ}" OR "{UNIVERSALCHAR}" OR "[^\\n\\r\\\\']" ")+" "'"),
TOKEN_DATA(T_EXTSTRINGLIT, "{EXTCHAR_SPEC}" Q("\"")
"(" "{ESCAPESEQ}" OR "{UNIVERSALCHAR}" OR "[^\\n\\r\\\\\"]" ")*" Q("\"")),
TOKEN_DATA(T_RAWSTRINGLIT, "{CHAR_SPEC}" "R" Q("\"")
"(" "{ESCAPESEQ}" OR "{UNIVERSALCHAR}" OR "[^\\\\\"]" ")*" Q("\"")),
TOKEN_DATA(T_EXTRAWSTRINGLIT, "{EXTCHAR_SPEC}" "R" Q("\"")
"(" "{ESCAPESEQ}" OR "{UNIVERSALCHAR}" OR "[^\\\\\"]" ")*" Q("\"")),
TOKEN_DATA(T_ALIGNAS, "alignas"),
TOKEN_DATA(T_ALIGNOF, "alignof"),
TOKEN_DATA(T_CHAR16_T, "char16_t"),
TOKEN_DATA(T_CHAR32_T, "char32_t"),
TOKEN_DATA(T_CONSTEXPR, "constexpr"),
TOKEN_DATA(T_DECLTYPE, "decltype"),
TOKEN_DATA(T_NOEXCEPT, "noexcept"),
TOKEN_DATA(T_NULLPTR, "nullptr"),
TOKEN_DATA(T_STATICASSERT, "static_assert"),
TOKEN_DATA(T_THREADLOCAL, "thread_local"),
{ token_id(0) } // this should be the last entry
};
// C++20 specific token definitions
template <typename Iterator, typename Position>
typename lexertl<Iterator, Position>::lexer_data const
lexertl<Iterator, Position>::init_data_cpp2a[INIT_DATA_CPP2A_SIZE] =
{
TOKEN_DATA(T_CHAR8_T, "char8_t"),
TOKEN_DATA(T_CONCEPT, "concept"),
TOKEN_DATA(T_CONSTEVAL, "consteval"),
TOKEN_DATA(T_CONSTINIT, "constinit"),
TOKEN_DATA(T_CO_AWAIT, "co_await"),
TOKEN_DATA(T_CO_RETURN, "co_return"),
TOKEN_DATA(T_CO_YIELD, "co_yield"),
TOKEN_DATA(T_REQUIRES, "requires"),
{ token_id(0) } // this should be the last entry
};
#undef MACRO_DATA
#undef TOKEN_DATA
#undef OR
@@ -459,6 +512,27 @@ std::ifstream dfa_in("wave_lexertl_lexer.dfa", std::ios::in|std::ios::binary);
}
}
// if in C++0x mode, add appropriate keywords
#if BOOST_WAVE_SUPPORT_CPP0X != 0
if (wave::need_cpp0x(lang) || wave::need_cpp2a(lang)) {
for (int j = 0; 0 != init_data_cpp0x[j].tokenid; ++j) {
rules.add(init_data_cpp0x[j].tokenregex,
init_data_cpp0x[j].tokenid);
}
}
#endif
// if in C++2a mode, add those keywords
#if BOOST_WAVE_SUPPORT_CPP2A != 0
if (wave::need_cpp2a(lang)) {
for (int j = 0; 0 != init_data_cpp2a[j].tokenid; ++j) {
rules.add(init_data_cpp2a[j].tokenregex,
init_data_cpp2a[j].tokenid);
}
}
#endif
for (int i = 0; 0 != init_data[i].tokenid; ++i) {
rules.add(init_data[i].tokenregex, init_data[i].tokenid);
}
@@ -764,6 +838,10 @@ lexer::lexertl<
#undef INIT_MACRO_DATA_SIZE
#undef T_ANYCTRL
#undef T_EXTCHARLIT
#undef T_EXTSTRINGLIT
#undef T_EXTRAWSTRINGLIT
///////////////////////////////////////////////////////////////////////////////
//
// The new_lexer_gen<>::new_lexer function (declared in lexertl_interface.hpp)


@@ -104,6 +104,8 @@ private:
static lexer_data const init_data[]; // common patterns
static lexer_data const init_data_cpp[]; // C++ only patterns
static lexer_data const init_data_cpp0x[]; // C++11 only patterns
static lexer_data const init_data_cpp2a[]; // C++20 only patterns
#if BOOST_WAVE_SUPPORT_PRAGMA_ONCE != 0
boost::wave::cpplexer::include_guards<token_type> guards;
@@ -154,6 +156,7 @@ private:
#endif
#define FLOAT_SUFFIX "(" "[fF][lL]?|[lL][fF]?" ")"
#define CHAR_SPEC "L?"
#define EXTCHAR_SPEC "(" "[uU]" OR "u8" ")"
#define BACKSLASH "(" Q("\\") OR TRI(Q("/")) ")"
#define ESCAPESEQ BACKSLASH "(" \
@@ -397,6 +400,55 @@ lexer<Iterator, Position>::init_data_cpp[] =
{ token_id(0) } // this should be the last entry
};
///////////////////////////////////////////////////////////////////////////////
// C++11 only token definitions
#define T_EXTCHARLIT token_id(T_CHARLIT|AltTokenType)
#define T_EXTSTRINGLIT token_id(T_STRINGLIT|AltTokenType)
#define T_EXTRAWSTRINGLIT token_id(T_RAWSTRINGLIT|AltTokenType)
template <typename Iterator, typename Position>
typename lexer<Iterator, Position>::lexer_data const
lexer<Iterator, Position>::init_data_cpp0x[] =
{
TOKEN_DATA(T_EXTCHARLIT, EXTCHAR_SPEC "'"
"(" ESCAPESEQ OR UNIVERSALCHAR OR "[^\\n\\r\\\\']" ")+" "'"),
TOKEN_DATA(T_EXTSTRINGLIT, EXTCHAR_SPEC Q("\"")
"(" ESCAPESEQ OR UNIVERSALCHAR OR "[^\\n\\r\\\\\"]" ")*" Q("\"")),
TOKEN_DATA(T_RAWSTRINGLIT, CHAR_SPEC "R" Q("\"")
"(" ESCAPESEQ OR UNIVERSALCHAR OR "[^\\\\\"]" ")*" Q("\"")),
TOKEN_DATA(T_EXTRAWSTRINGLIT, EXTCHAR_SPEC "R" Q("\"")
"(" ESCAPESEQ OR UNIVERSALCHAR OR "[^\\\\\"]" ")*" Q("\"")),
TOKEN_DATA(T_ALIGNAS, "alignas"),
TOKEN_DATA(T_ALIGNOF, "alignof"),
TOKEN_DATA(T_CHAR16_T, "char16_t"),
TOKEN_DATA(T_CHAR32_T, "char32_t"),
TOKEN_DATA(T_CONSTEXPR, "constexpr"),
TOKEN_DATA(T_DECLTYPE, "decltype"),
TOKEN_DATA(T_NOEXCEPT, "noexcept"),
TOKEN_DATA(T_NULLPTR, "nullptr"),
TOKEN_DATA(T_STATICASSERT, "static_assert"),
TOKEN_DATA(T_THREADLOCAL, "thread_local"),
{ token_id(0) } // this should be the last entry
};
///////////////////////////////////////////////////////////////////////////////
// C++20 only token definitions
template <typename Iterator, typename Position>
typename lexer<Iterator, Position>::lexer_data const
lexer<Iterator, Position>::init_data_cpp2a[] =
{
TOKEN_DATA(T_CHAR8_T, "char8_t"),
TOKEN_DATA(T_CONCEPT, "concept"),
TOKEN_DATA(T_CONSTEVAL, "consteval"),
TOKEN_DATA(T_CONSTINIT, "constinit"),
TOKEN_DATA(T_CO_AWAIT, "co_await"),
TOKEN_DATA(T_CO_RETURN, "co_return"),
TOKEN_DATA(T_CO_YIELD, "co_yield"),
TOKEN_DATA(T_REQUIRES, "requires"),
{ token_id(0) } // this should be the last entry
};
///////////////////////////////////////////////////////////////////////////////
// undefine macros, required for regular expression definitions
#undef INCLUDEDEF
@@ -413,6 +465,7 @@ lexer<Iterator, Position>::init_data_cpp[] =
#undef INTEGER
#undef FLOAT_SUFFIX
#undef CHAR_SPEC
#undef EXTCHAR_SPEC
#undef BACKSLASH
#undef ESCAPESEQ
#undef HEXQUAD
@@ -425,6 +478,9 @@ lexer<Iterator, Position>::init_data_cpp[] =
#undef TOKEN_DATA
#undef TOKEN_DATA_EX
#undef T_EXTCHARLIT
#undef T_EXTSTRINGLIT
#undef T_EXTRAWSTRINGLIT
///////////////////////////////////////////////////////////////////////////////
// initialize cpp lexer
template <typename Iterator, typename Position>
@@ -444,6 +500,24 @@ lexer<Iterator, Position>::lexer(Iterator const &first,
}
}
#if BOOST_WAVE_SUPPORT_CPP0X != 0
if (boost::wave::need_cpp0x(language) || boost::wave::need_cpp2a(language)) {
for (int j = 0; 0 != init_data_cpp0x[j].tokenid; ++j) {
xlexer.register_regex(init_data_cpp0x[j].tokenregex,
init_data_cpp0x[j].tokenid, init_data_cpp[j].tokencb);
}
}
#endif
#if BOOST_WAVE_SUPPORT_CPP2A != 0
if (boost::wave::need_cpp2a(language)) {
for (int j = 0; 0 != init_data_cpp2a[j].tokenid; ++j) {
xlexer.register_regex(init_data_cpp2a[j].tokenregex,
init_data_cpp2a[j].tokenid, init_data_cpp[j].tokencb);
}
}
#endif
// tokens valid for C++ and C99
for (int i = 0; 0 != init_data[i].tokenid; ++i) {
xlexer.register_regex(init_data[i].tokenregex, init_data[i].tokenid,


@@ -222,6 +222,15 @@ static char const *tok_names[] = {
/* 430 */ "STATIC_ASSERT",
/* 431 */ "THREADLOCAL",
/* 432 */ "RAWSTRINGLIT",
/* 433 */ "T_CHAR8_T",
/* 434 */ "T_CONCEPT",
/* 435 */ "T_CONSTEVAL",
/* 436 */ "T_CONSTINIT",
/* 437 */ "T_CO_AWAIT",
/* 438 */ "T_CO_RETURN",
/* 439 */ "T_CO_YIELD",
/* 440 */ "T_REQUIRES",
};
// make sure, I have not forgotten any commas (as I did more than once)
@@ -422,8 +431,17 @@ static char const *tok_values[] = {
/* 428 */ "noexcept",
/* 429 */ "nullptr",
/* 430 */ "static_assert",
/* 431 */ "threadlocal",
/* 431 */ "thread_local",
/* 432 */ "", // extrawstringlit
/* 433 */ "char8_t",
/* 434 */ "concept",
/* 435 */ "consteval",
/* 436 */ "constinit",
/* 437 */ "co_await",
/* 438 */ "co_return",
/* 439 */ "co_yield",
/* 440 */ "requires",
};
// make sure, I have not forgotten any commas (as I did more than once)
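These two tables back boost::wave::get_token_name() and get_token_value(), so the C++20 ids added above resolve through those helpers as well. A minimal sketch of the lookups (return types left to auto; printing omitted):

#include <boost/wave/token_ids.hpp>

int main()
{
    using namespace boost::wave;
    auto name  = get_token_name(T_CO_AWAIT);    // readable name from tok_names[]
    auto value = get_token_value(T_REQUIRES);   // canonical spelling from tok_values[]
    (void)name; (void)value;
    return 0;
}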


@@ -198,6 +198,28 @@ lexem const lexems[] =
{ "#region", boost::wave::T_MSEXT_PP_REGION },
{ "#endregion", boost::wave::T_MSEXT_PP_ENDREGION },
#endif // BOOST_WAVE_SUPPORT_MS_EXTENSIONS != 0
#if BOOST_WAVE_SUPPORT_CPP0X != 0
{ "R\"foo(string literal)foo\"", boost::wave::T_RAWSTRINGLIT },
{ "alignas", boost::wave::T_ALIGNAS },
{ "alignof", boost::wave::T_ALIGNOF },
{ "char16_t", boost::wave::T_CHAR16_T },
{ "char32_t", boost::wave::T_CHAR32_T },
{ "constexpr", boost::wave::T_CONSTEXPR },
{ "decltype", boost::wave::T_DECLTYPE },
{ "noexcept", boost::wave::T_NOEXCEPT },
{ "nullptr", boost::wave::T_NULLPTR },
{ "thread_local", boost::wave::T_THREADLOCAL },
#endif // BOOST_WAVE_SUPPORT_CPP0X != 0
#if BOOST_WAVE_SUPPORT_CPP2A != 0
{ "char8_t", boost::wave::T_CHAR8_T },
{ "concept", boost::wave::T_CONCEPT },
{ "consteval", boost::wave::T_CONSTEVAL },
{ "constinit", boost::wave::T_CONSTINIT },
{ "co_await", boost::wave::T_CO_AWAIT },
{ "co_return", boost::wave::T_CO_RETURN },
{ "co_yield", boost::wave::T_CO_YIELD },
{ "requires", boost::wave::T_REQUIRES },
#endif
{ "#define", boost::wave::T_PP_DEFINE },
{ "#ifdef", boost::wave::T_PP_IFDEF },
{ "#ifndef", boost::wave::T_PP_IFNDEF },


@@ -50,7 +50,7 @@ main(int argc, char *argv[])
token_type::string_type instr(data->token);
lexer_type it = lexer_type(instr.begin(), instr.end(), pos,
boost::wave::support_option_long_long);
boost::wave::support_cpp2a);
lexer_type end = lexer_type();
// verify the correct outcome of the tokenization


@@ -56,7 +56,7 @@ main(int argc, char *argv[])
token_type::string_type instr(data->token);
lexer_type it = lexer_type(instr.begin(), instr.end(), pos,
boost::wave::support_option_long_long);
boost::wave::support_cpp2a);
lexer_type end = lexer_type();
// verify the correct outcome of the tokenization


@@ -59,7 +59,7 @@ main(int argc, char *argv[])
token_type::string_type instr(data->token);
lexer_type it = lexer_type(instr.begin(), instr.end(), pos,
boost::wave::support_option_long_long);
boost::wave::support_cpp2a);
lexer_type end = lexer_type();
// verify the correct outcome of the tokenization


@@ -57,7 +57,7 @@ main(int argc, char *argv[])
token_type::string_type instr(data->token);
lexer_type it = lexer_type(instr.begin(), instr.end(), pos,
boost::wave::support_option_long_long);
boost::wave::support_cpp2a);
lexer_type end = lexer_type();
// verify the correct outcome of the tokenisation