Commit c23f87e8 authored by Stephen Morris

[2396] Merge branch 'master' into trac2396

parents 261a171c 8e39d7eb
516. [bug] marcin
Fixed 'make distcheck' failure when running perfdhcp unit tests.
The unit tests used to read data files using paths relative to the
current working directory, so when a test was run from a different
directory the files could not be found.
(Trac #2479, git 4e8325e1b309f1d388a3055ec1e1df98c377f383)
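The usual cure for this class of failure is to resolve test data against a directory fixed at build time instead of the process's working directory; a minimal sketch of that pattern (the TEST_DATA_DIR macro and the file name are illustrative assumptions, not taken from the perfdhcp sources):

    // The build system would normally supply the absolute source directory,
    // e.g. AM_CPPFLAGS += -DTEST_DATA_DIR="\"$(abs_srcdir)/testdata\""
    #ifndef TEST_DATA_DIR
    #define TEST_DATA_DIR "testdata"   // fallback so the sketch compiles alone
    #endif

    #include <fstream>
    #include <string>

    // Returns a path to a test data file that is independent of the
    // directory the test binary happens to be run from.
    std::string getDataPath(const std::string& name) {
        return (std::string(TEST_DATA_DIR) + "/" + name);
    }

    // Usage inside a test body:
    //   std::ifstream in(getDataPath("template1.hex").c_str());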
515. [bug] jinmei
The in-memory data source now accepts, during loading, an RRSIG
provided without a covered RRset. A subsequent query for its owner name
......
......@@ -1308,7 +1308,7 @@ AC_CONFIG_FILES([Makefile
tests/tools/badpacket/tests/Makefile
tests/tools/perfdhcp/Makefile
tests/tools/perfdhcp/tests/Makefile
tests/tools/perfdhcp/templates/Makefile
tests/tools/perfdhcp/tests/testdata/Makefile
dns++.pc
])
AC_OUTPUT([doc/version.ent
......
......@@ -472,7 +472,7 @@ var/
<title>Packages</title>
<para>
Some operating systems or softare package vendors may
Some operating systems or software package vendors may
provide ready-to-use, pre-built software packages for
the BIND 10 suite.
Installing a pre-built package means you do not need to
......@@ -2157,7 +2157,7 @@ AND_MATCH := "ALL": [ RULE_RAW, RULE_RAW, ... ]
you indicate that the system is not usable without the
component and if such component fails, the system shuts
down no matter when the failure happened. This is the
behaviour of the core components (the ones you can't turn
behavior of the core components (the ones you can't turn
off), but you can declare any other components as core as
well if you wish (but you can turn these off, they just
can't fail).
......
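In configuration terms this is the component's "kind" setting; a hedged bindctl sketch of declaring an extra component as core (the exact configuration path, Boss/components here, is an assumption about this BIND 10 version):

    > config add Boss/components b10-resolver
    > config set Boss/components/b10-resolver/kind core
    > config commit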
......@@ -225,7 +225,7 @@ createBuiltinVersionResponse(const qid_t qid, vector<uint8_t>& data) {
message.setHeaderFlag(Message::HEADERFLAG_AA);
RRsetPtr rrset_version = RRsetPtr(new RRset(version_name, RRClass::CH(),
RRType::TXT(), RRTTL(0)));
rrset_version->addRdata(generic::TXT(PACKAGE_STRING));
rrset_version->addRdata(generic::TXT("\"" PACKAGE_STRING "\""));
message.addRRset(Message::SECTION_ANSWER, rrset_version);
RRsetPtr rrset_version_ns = RRsetPtr(new RRset(apex_name, RRClass::CH(),
......
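The added quoting matters because generic::TXT's string constructor interprets its argument as master-file RDATA text, so a PACKAGE_STRING containing spaces would no longer come out as a single <character-string>; wrapping it in escaped quotes keeps the whole version text together. A minimal illustration (the version string is made up):

    // Unquoted: whitespace separates fields, so this would presumably yield
    // two character-strings, "bind10" and "20121208".
    const generic::TXT split("bind10 20121208");
    // Quoted: one character-string containing the space, which is what a
    // version.bind query should return.
    const generic::TXT whole("\"bind10 20121208\"");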
......@@ -193,7 +193,8 @@ class ZonemgrRefresh:
def zone_handle_notify(self, zone_name_class, master):
"""Handle zone notify"""
if (self._zone_not_exist(zone_name_class)):
logger.error(ZONEMGR_UNKNOWN_ZONE_NOTIFIED, zone_name_class[0], zone_name_class[1])
logger.error(ZONEMGR_UNKNOWN_ZONE_NOTIFIED, zone_name_class[0],
zone_name_class[1], master)
raise ZonemgrException("[b10-zonemgr] Notified zone (%s, %s) "
"doesn't belong to zonemgr" % zone_name_class)
self._set_zone_notifier_master(zone_name_class, master)
......
......@@ -138,7 +138,7 @@ zone, or, if this error appears without the administrator giving transfer
commands, it can indicate an error in the program, as it should not have
initiated transfers of unknown zones on its own.
% ZONEMGR_UNKNOWN_ZONE_NOTIFIED notified zone %1 (class %2) is not known to the zone manager
% ZONEMGR_UNKNOWN_ZONE_NOTIFIED notified zone %1/%2 from %3 is not known to the zone manager
A NOTIFY was received but the zone that was the subject of the operation
is not being managed by the zone manager. This may indicate an error
in the program (as the operation should not have been initiated if this
......
......@@ -40,7 +40,7 @@ libb10_dhcp___la_LIBADD = $(top_builddir)/src/lib/asiolink/libb10-asiolink.la
libb10_dhcp___la_LIBADD += $(top_builddir)/src/lib/util/libb10-util.la
libb10_dhcp___la_LDFLAGS = -no-undefined -version-info 2:0:0
EXTRA_DIST = README
EXTRA_DIST = README libdhcp++.dox
if USE_CLANGPP
# Disable unused parameter warning caused by some of the
......
......@@ -48,5 +48,5 @@ libb10_dhcpsrv_la_CXXFLAGS += -Wno-unused-parameter
endif
# Distribute MySQL schema creation script and backend documentation
EXTRA_DIST = dhcpdb_create.mysql database_backends.dox
EXTRA_DIST = dhcpdb_create.mysql database_backends.dox libdhcpsrv.dox
dist_pkgdata_DATA = dhcpdb_create.mysql
......@@ -21,6 +21,8 @@ EXTRA_DIST += rdata/ch_3/a_1.cc
EXTRA_DIST += rdata/ch_3/a_1.h
EXTRA_DIST += rdata/generic/cname_5.cc
EXTRA_DIST += rdata/generic/cname_5.h
EXTRA_DIST += rdata/generic/detail/char_string.cc
EXTRA_DIST += rdata/generic/detail/char_string.h
EXTRA_DIST += rdata/generic/detail/nsec_bitmap.cc
EXTRA_DIST += rdata/generic/detail/nsec_bitmap.h
EXTRA_DIST += rdata/generic/detail/nsec3param_common.cc
......@@ -123,6 +125,8 @@ libb10_dns___la_SOURCES += tsigrecord.h tsigrecord.cc
libb10_dns___la_SOURCES += character_string.h character_string.cc
libb10_dns___la_SOURCES += master_loader_callbacks.h
libb10_dns___la_SOURCES += master_loader.h
libb10_dns___la_SOURCES += rdata/generic/detail/char_string.h
libb10_dns___la_SOURCES += rdata/generic/detail/char_string.cc
libb10_dns___la_SOURCES += rdata/generic/detail/nsec_bitmap.h
libb10_dns___la_SOURCES += rdata/generic/detail/nsec_bitmap.cc
libb10_dns___la_SOURCES += rdata/generic/detail/nsec3param_common.cc
......
......@@ -32,7 +32,8 @@ import sys
#
# Example:
# new_rdata_factory_users = [('a', 'in'), ('a', 'ch'), ('soa', 'generic')]
new_rdata_factory_users = []
new_rdata_factory_users = [('aaaa', 'in'), ('txt', 'generic'),
('spf', 'generic')]
re_typecode = re.compile('([\da-z]+)_(\d+)')
classcode2txt = {}
......@@ -126,6 +127,9 @@ class AbstractMessageRenderer;\n\n'''
explicit ''' + type_utxt + '''(const std::string& type_str);
''' + type_utxt + '''(isc::util::InputBuffer& buffer, size_t rdata_len);
''' + type_utxt + '''(const ''' + type_utxt + '''& other);
''' + type_utxt + '''(
MasterLexer& lexer, const Name* name,
MasterLoader::Options options, MasterLoaderCallbacks& callbacks);
virtual std::string toText() const;
virtual void toWire(isc::util::OutputBuffer& buffer) const;
virtual void toWire(AbstractMessageRenderer& renderer) const;
......@@ -213,17 +217,33 @@ def generate_rdatadef(file, basemtime):
rdata_deffile.write(class_definitions)
rdata_deffile.close()
def generate_rdatahdr(file, declarations, basemtime):
def generate_rdatahdr(file, heading, declarations, basemtime):
if not need_generate(file, basemtime):
print('skip generating ' + file);
return
heading += '''
#ifndef DNS_RDATACLASS_H
#define DNS_RDATACLASS_H 1
#include <dns/master_loader.h>
namespace isc {
namespace dns {
class Name;
class MasterLexer;
class MasterLoaderCallbacks;
}
}
'''
declarations += '''
#endif // DNS_RDATACLASS_H
// Local Variables:
// mode: c++
// End:
'''
rdata_header = open(file, 'w')
rdata_header.write(heading_txt)
rdata_header.write(heading)
rdata_header.write(declarations)
rdata_header.close()
......@@ -320,8 +340,8 @@ if __name__ == "__main__":
try:
import_definitions(classcode2txt, typecode2txt, typeandclass)
generate_rdatadef('@builddir@/rdataclass.cc', rdatadef_mtime)
generate_rdatahdr('@builddir@/rdataclass.h', rdata_declarations,
rdatahdr_mtime)
generate_rdatahdr('@builddir@/rdataclass.h', heading_txt,
rdata_declarations, rdatahdr_mtime)
generate_typeclasscode('rrtype', rdatahdr_mtime, typecode2txt, 'Type')
generate_typeclasscode('rrclass', classdir_mtime,
classcode2txt, 'Class')
......
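With ('aaaa', 'in') added to new_rdata_factory_users, the template above should emit a lexer-aware constructor into the generated rdataclass.h; a reconstruction from the template (assembled by hand, not copied from generated output):

    class AAAA : public Rdata {
    public:
        explicit AAAA(const std::string& type_str);
        AAAA(isc::util::InputBuffer& buffer, size_t rdata_len);
        AAAA(const AAAA& other);
        AAAA(
            MasterLexer& lexer, const Name* name,
            MasterLoader::Options options, MasterLoaderCallbacks& callbacks);
        virtual std::string toText() const;
        virtual void toWire(isc::util::OutputBuffer& buffer) const;
        virtual void toWire(AbstractMessageRenderer& renderer) const;
        // ... comparison and other members follow as before ...
    };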
......@@ -36,7 +36,7 @@ using namespace master_lexer_internal;
struct MasterLexer::MasterLexerImpl {
MasterLexerImpl() : source_(NULL), token_(Token::NOT_STARTED),
MasterLexerImpl() : source_(NULL), token_(MasterToken::NOT_STARTED),
paren_count_(0), last_was_eol_(false),
has_previous_(false),
previous_paren_count_(0),
......@@ -82,7 +82,7 @@ struct MasterLexer::MasterLexerImpl {
std::vector<InputSourcePtr> sources_;
InputSource* source_; // current source (NULL if sources_ is empty)
Token token_; // currently recognized token (set by a state)
MasterToken token_; // currently recognized token (set by a state)
std::vector<char> data_; // placeholder for string data
// These are used in states, and defined here only as a placeholder.
......@@ -165,9 +165,8 @@ MasterLexer::getSourceLine() const {
return (impl_->sources_.back()->getCurrentLine());
}
const MasterLexer::Token&
const MasterToken&
MasterLexer::getNextToken(Options options) {
// If the source is not available
if (impl_->source_ == NULL) {
isc_throw(isc::InvalidOperation, "No source to read tokens from");
}
......@@ -178,7 +177,7 @@ MasterLexer::getNextToken(Options options) {
impl_->has_previous_ = true;
// Reset the token now. This is to check a token was actually produced.
// This is a debugging aid.
impl_->token_ = Token(Token::NO_TOKEN_PRODUCED);
impl_->token_ = MasterToken(MasterToken::NO_TOKEN_PRODUCED);
// And get the token
// This actually handles EOF internally too.
......@@ -188,8 +187,62 @@ MasterLexer::getNextToken(Options options) {
}
// Make sure a token was produced. Since this Can Not Happen, we assert
// here instead of throwing.
assert(impl_->token_.getType() != Token::ERROR ||
impl_->token_.getErrorCode() != Token::NO_TOKEN_PRODUCED);
assert(impl_->token_.getType() != MasterToken::ERROR ||
impl_->token_.getErrorCode() != MasterToken::NO_TOKEN_PRODUCED);
return (impl_->token_);
}
namespace {
inline MasterLexer::Options
optionsForTokenType(MasterToken::Type expect) {
switch (expect) {
case MasterToken::STRING:
return (MasterLexer::NONE);
case MasterToken::QSTRING:
return (MasterLexer::QSTRING);
case MasterToken::NUMBER:
return (MasterLexer::NUMBER);
default:
isc_throw(InvalidParameter,
"expected type for getNextToken not supported: " << expect);
}
}
}
const MasterToken&
MasterLexer::getNextToken(MasterToken::Type expect, bool eol_ok) {
// Get the next token, specifying an appropriate option corresponding to
// the expected type. The result should be set in impl_->token_.
getNextToken(optionsForTokenType(expect));
if (impl_->token_.getType() == MasterToken::ERROR) {
if (impl_->token_.getErrorCode() == MasterToken::NUMBER_OUT_OF_RANGE) {
ungetToken();
}
throw LexerError(__FILE__, __LINE__, impl_->token_);
}
const bool is_eol_like =
(impl_->token_.getType() == MasterToken::END_OF_LINE ||
impl_->token_.getType() == MasterToken::END_OF_FILE);
if (eol_ok && is_eol_like) {
return (impl_->token_);
}
if (impl_->token_.getType() == MasterToken::STRING &&
expect == MasterToken::QSTRING) {
return (impl_->token_);
}
if (impl_->token_.getType() != expect) {
ungetToken();
if (is_eol_like) {
throw LexerError(__FILE__, __LINE__,
MasterToken(MasterToken::UNEXPECTED_END));
}
assert(expect == MasterToken::NUMBER);
throw LexerError(__FILE__, __LINE__,
MasterToken(MasterToken::BAD_NUMBER));
}
return (impl_->token_);
}
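A hedged sketch of how a from-text parser might use this convenience overload, assuming a lexer with a pushed source and a MasterLoaderCallbacks object named callbacks in scope:

    try {
        // Expect exactly one string field; eol_ok=false means end of line
        // is not acceptable here and raises LexerError instead of returning.
        const MasterToken& token =
            lexer.getNextToken(MasterToken::STRING, false);
        const std::string field = token.getString();
        // ... build the RDATA field from 'field' ...
    } catch (const MasterLexer::LexerError& error) {
        // error.token_ is an error token describing what was actually seen.
        callbacks.error(lexer.getSourceName(), lexer.getSourceLine(),
                        error.token_.getErrorText());
    }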
......@@ -212,16 +265,17 @@ const char* const error_text[] = {
"unexpected end of input", // UNEXPECTED_END
"unbalanced quotes", // UNBALANCED_QUOTES
"no token produced", // NO_TOKEN_PRODUCED
"number out of range" // NUMBER_OUT_OF_RANGE
"number out of range", // NUMBER_OUT_OF_RANGE
"not a valid number" // BAD_NUMBER
};
const size_t error_text_max_count = sizeof(error_text) / sizeof(error_text[0]);
} // end unnamed namespace
std::string
MasterLexer::Token::getErrorText() const {
MasterToken::getErrorText() const {
if (type_ != ERROR) {
isc_throw(InvalidOperation,
"Token::getErrorText() for non error type");
"MasterToken::getErrorText() for non error type");
}
// The class integrity ensures the following:
......@@ -234,14 +288,12 @@ namespace master_lexer_internal {
// Note that these need to be defined here so that they can refer to
// the details of MasterLexerImpl.
typedef MasterLexer::Token Token; // convenience shortcut
bool
State::wasLastEOL(const MasterLexer& lexer) const {
return (lexer.impl_->last_was_eol_);
}
const MasterLexer::Token&
const MasterToken&
State::getToken(const MasterLexer& lexer) const {
return (lexer.impl_->token_);
}
......@@ -271,7 +323,7 @@ public:
if (c != '\n') {
getLexerImpl(lexer)->source_->ungetChar();
}
getLexerImpl(lexer)->token_ = Token(Token::END_OF_LINE);
getLexerImpl(lexer)->token_ = MasterToken(MasterToken::END_OF_LINE);
getLexerImpl(lexer)->last_was_eol_ = true;
}
};
......@@ -342,24 +394,24 @@ State::start(MasterLexer& lexer, MasterLexer::Options options) {
if (c == InputSource::END_OF_STREAM) {
lexerimpl.last_was_eol_ = false;
if (paren_count != 0) {
lexerimpl.token_ = Token(Token::UNBALANCED_PAREN);
lexerimpl.token_ = MasterToken(MasterToken::UNBALANCED_PAREN);
paren_count = 0; // reset to 0; this helps in lenient mode.
return (NULL);
}
lexerimpl.token_ = Token(Token::END_OF_FILE);
lexerimpl.token_ = MasterToken(MasterToken::END_OF_FILE);
return (NULL);
} else if (c == ' ' || c == '\t') {
// If requested and we are not in (), recognize the initial space.
if (lexerimpl.last_was_eol_ && paren_count == 0 &&
(options & MasterLexer::INITIAL_WS) != 0) {
lexerimpl.last_was_eol_ = false;
lexerimpl.token_ = Token(Token::INITIAL_WS);
lexerimpl.token_ = MasterToken(MasterToken::INITIAL_WS);
return (NULL);
}
} else if (c == '\n') {
lexerimpl.last_was_eol_ = true;
if (paren_count == 0) { // we don't recognize EOL if we are in ()
lexerimpl.token_ = Token(Token::END_OF_LINE);
lexerimpl.token_ = MasterToken(MasterToken::END_OF_LINE);
return (NULL);
}
} else if (c == '\r') {
......@@ -375,7 +427,7 @@ State::start(MasterLexer& lexer, MasterLexer::Options options) {
} else if (c == ')') {
lexerimpl.last_was_eol_ = false;
if (paren_count == 0) {
lexerimpl.token_ = Token(Token::UNBALANCED_PAREN);
lexerimpl.token_ = MasterToken(MasterToken::UNBALANCED_PAREN);
return (NULL);
}
--paren_count;
......@@ -406,8 +458,11 @@ String::handle(MasterLexer& lexer) const {
if (getLexerImpl(lexer)->isTokenEnd(c, escaped)) {
getLexerImpl(lexer)->source_->ungetChar();
// make sure it is nul-terminated as a C string (the terminator is
// excluded from the token data).
data.push_back('\0');
getLexerImpl(lexer)->token_ =
MasterLexer::Token(&data.at(0), data.size());
MasterToken(&data.at(0), data.size() - 1);
return;
}
escaped = (c == '\\' && !escaped);
......@@ -417,7 +472,7 @@ String::handle(MasterLexer& lexer) const {
void
QString::handle(MasterLexer& lexer) const {
MasterLexer::Token& token = getLexerImpl(lexer)->token_;
MasterToken& token = getLexerImpl(lexer)->token_;
std::vector<char>& data = getLexerImpl(lexer)->data_;
data.clear();
......@@ -425,7 +480,7 @@ QString::handle(MasterLexer& lexer) const {
while (true) {
const int c = getLexerImpl(lexer)->source_->getChar();
if (c == InputSource::END_OF_STREAM) {
token = Token(Token::UNEXPECTED_END);
token = MasterToken(MasterToken::UNEXPECTED_END);
return;
} else if (c == '"') {
if (escaped) {
......@@ -434,12 +489,15 @@ QString::handle(MasterLexer& lexer) const {
escaped = false;
data.back() = '"';
} else {
token = MasterLexer::Token(&data.at(0), data.size(), true);
// make sure it is nul-terminated as a C string (the terminator is
// excluded from the token data). This also simplifies the case of an
// empty string.
data.push_back('\0');
token = MasterToken(&data.at(0), data.size() - 1, true);
return;
}
} else if (c == '\n' && !escaped) {
getLexerImpl(lexer)->source_->ungetChar();
token = Token(Token::UNBALANCED_QUOTES);
token = MasterToken(MasterToken::UNBALANCED_QUOTES);
return;
} else {
escaped = (c == '\\' && !escaped);
......@@ -450,7 +508,7 @@ QString::handle(MasterLexer& lexer) const {
void
Number::handle(MasterLexer& lexer) const {
MasterLexer::Token& token = getLexerImpl(lexer)->token_;
MasterToken& token = getLexerImpl(lexer)->token_;
// It may yet turn out to be a string, so we first
// collect all the data
......@@ -464,21 +522,21 @@ Number::handle(MasterLexer& lexer) const {
getLexerImpl(lexer)->source_->getChar(), escaped);
if (getLexerImpl(lexer)->isTokenEnd(c, escaped)) {
getLexerImpl(lexer)->source_->ungetChar();
// We need to close the string whether it's digits-only (for
// lexical_cast) or not (see String::handle()).
data.push_back('\0');
if (digits_only) {
// Close the string for lexical_cast
data.push_back('\0');
try {
const uint32_t number32 =
boost::lexical_cast<uint32_t, const char*>(&data[0]);
token = MasterLexer::Token(number32);
token = MasterToken(number32);
} catch (const boost::bad_lexical_cast&) {
// Since we already know we have only digits,
// range should be the only possible problem.
token = Token(Token::NUMBER_OUT_OF_RANGE);
token = MasterToken(MasterToken::NUMBER_OUT_OF_RANGE);
}
} else {
token = MasterLexer::Token(&data.at(0),
data.size());
token = MasterToken(&data.at(0), data.size() - 1);
}
return;
}
......
......@@ -43,10 +43,10 @@ namespace master_lexer_internal {
/// state, so it makes more sense to separate the interface for the transition
/// from the initial state.
///
/// When an object of a specific state class completes the session, it
/// normally sets the identified token in the lexer, and returns NULL;
/// if more transition is necessary, it returns a pointer to the next state
/// object.
/// If the whole lexer transition is completed within start(), it sets the
/// identified token and returns NULL; otherwise it returns a pointer to
/// an object of a specific state class that completes the session
/// on the call of handle().
///
/// As is usual in the state design pattern, the \c State class is made
/// a friend class of \c MasterLexer and can refer to its internal details.
......@@ -119,7 +119,7 @@ public:
/// purposes.
///@{
bool wasLastEOL(const MasterLexer& lexer) const;
const MasterLexer::Token& getToken(const MasterLexer& lexer) const;
const MasterToken& getToken(const MasterLexer& lexer) const;
size_t getParenCount(const MasterLexer& lexer) const;
///@}
......
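The convention documented here is what the lexer's tokenizing loop relies on; a simplified sketch of that driver (modeled on, not copied from, MasterLexer::getNextToken):

    // Run the initial transition; a NULL return means start() already
    // recognized the token (EOF, end of line, initial whitespace, ...).
    const State* state = State::start(lexer, options);
    if (state != NULL) {
        // Otherwise a concrete state (String, QString, Number) finishes the
        // session and stores the recognized token in the lexer.
        state->handle(lexer);
    }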
......@@ -38,7 +38,7 @@ class RdataTest(unittest.TestCase):
self.assertRaises(InvalidRdataText, Rdata, RRType("A"), RRClass("IN"),
"Invalid Rdata Text")
self.assertRaises(CharStringTooLong, Rdata, RRType("TXT"),
RRClass("IN"), ' ' * 256)
RRClass("IN"), 'x' * 256)
self.assertRaises(InvalidRdataLength, Rdata, RRType("TXT"),
RRClass("IN"), bytes(65536))
self.assertRaises(DNSMessageFORMERR, Rdata, RRType("TXT"),
......
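The filler character changes because a TXT <character-string> is limited to 255 octets, and with the lexer-based TXT parsing a run of spaces would presumably be treated as field separators rather than as one overlong string; 'x' * 256 still exercises the length check. The same limit seen from the C++ API (a sketch):

    // A single <character-string> may hold at most 255 octets, so this is
    // expected to throw CharStringTooLong.
    const generic::TXT too_long(std::string(256, 'x'));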
......@@ -12,6 +12,20 @@
// OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
// PERFORMANCE OF THIS SOFTWARE.
#include <exceptions/exceptions.h>
#include <util/buffer.h>
#include <dns/name.h>
#include <dns/messagerenderer.h>
#include <dns/master_lexer.h>
#include <dns/rdata.h>
#include <dns/rrparamregistry.h>
#include <dns/rrtype.h>
#include <boost/lexical_cast.hpp>
#include <boost/shared_ptr.hpp>
#include <algorithm>
#include <cctype>
#include <string>
......@@ -24,16 +38,6 @@
#include <stdint.h>
#include <string.h>
#include <boost/lexical_cast.hpp>
#include <boost/shared_ptr.hpp>
#include <util/buffer.h>
#include <dns/name.h>
#include <dns/messagerenderer.h>
#include <dns/rdata.h>
#include <dns/rrparamregistry.h>
#include <dns/rrtype.h>
using namespace std;
using boost::lexical_cast;
using namespace isc::util;
......@@ -81,23 +85,92 @@ createRdata(const RRType& rrtype, const RRClass& rrclass, const Rdata& source)
source));
}
namespace {
void
fromtextError(bool& error_issued, const MasterLexer& lexer,
MasterLoaderCallbacks& callbacks,
const MasterToken* token, const char* reason)
{
// Don't be too noisy if there are many issues for a single RDATA
if (error_issued) {
return;
}
error_issued = true;
if (token == NULL) {
callbacks.error(lexer.getSourceName(), lexer.getSourceLine(),
"createRdata from text failed: " + string(reason));
return;
}
switch (token->getType()) {
case MasterToken::STRING:
case MasterToken::QSTRING:
callbacks.error(lexer.getSourceName(), lexer.getSourceLine(),
"createRdata from text failed near '" +
token->getString() + "': " + string(reason));
break;
case MasterToken::ERROR:
callbacks.error(lexer.getSourceName(), lexer.getSourceLine(),
"createRdata from text failed: " +
token->getErrorText());
break;
default:
// This case shouldn't happen based on how we use MasterLexer in
// createRdata(), so we could assert() that here. But since it
// depends on detailed behavior of other classes, we treat the case
// in a bit less harsh way.
isc_throw(Unexpected, "bug: createRdata() saw unexpected token type");
}
}
}
RdataPtr
createRdata(const RRType& rrtype, const RRClass& rrclass,
MasterLexer& lexer, const Name* origin,
MasterLoader::Options options,
MasterLoaderCallbacks& callbacks)
{
RdataPtr ret;
RdataPtr rdata;
bool error_issued = false;
try {
ret = RRParamRegistry::getRegistry().createRdata(rrtype, rrclass,
lexer, origin,
options, callbacks);
} catch (...) {
// ret is NULL here.
rdata = RRParamRegistry::getRegistry().createRdata(
rrtype, rrclass, lexer, origin, options, callbacks);
} catch (const MasterLexer::LexerError& error) {
fromtextError(error_issued, lexer, callbacks, &error.token_, "");
} catch (const Exception& ex) {
// Catching all isc::Exception is too broad, but right now we don't
// have better granularity. When we complete #2518 we can make this
// finer.
fromtextError(error_issued, lexer, callbacks, NULL, ex.what());
}
// Other exceptions mean a serious implementation bug or fatal system
// error; it doesn't make sense to catch and try to recover from them
// here. Just propagate.
// Consume to end of line / file.
// Call callback via fromtextError once if there was an error.
do {
const MasterToken& token = lexer.getNextToken();
switch (token.getType()) {
case MasterToken::END_OF_LINE:
return (rdata);
case MasterToken::END_OF_FILE:
callbacks.warning(lexer.getSourceName(), lexer.getSourceLine(),
"file does not end with newline");
return (rdata);
default:
rdata.reset(); // we'll return NULL
fromtextError(error_issued, lexer, callbacks, &token,
"extra input text");
// Continue until we see EOL or EOF
}
} while (true);
return (ret);
// We shouldn't reach here
assert(false);
return (RdataPtr()); // add explicit return to silence some compilers
}
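A hedged sketch of how a zone loader might call this overload (the MANY_ERRORS option and the in-scope lexer, origin and callbacks objects are assumptions about the surrounding API, not shown in this diff):

    // The lexer is positioned at the beginning of the RDATA text.
    const RdataPtr rdata = createRdata(RRType::TXT(), RRClass::IN(),
                                       lexer, &origin,
                                       MasterLoader::MANY_ERRORS, callbacks);
    if (!rdata) {
        // Any problem has already been reported via callbacks.error(), and
        // the lexer has been advanced past the rest of the line.
    }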
int
......@@ -211,9 +284,10 @@ Generic::Generic(MasterLexer& lexer, const Name*,
std::string s;
while (true) {
const MasterLexer::Token& token = lexer.getNextToken();