Commit e19573d7 authored by Stephen Morris

[2513] Merge branch 'master' into trac2513

parents 1d4751ed 4e8325e1
......@@ -1308,7 +1308,7 @@ AC_CONFIG_FILES([Makefile
tests/tools/badpacket/tests/Makefile
tests/tools/perfdhcp/Makefile
tests/tools/perfdhcp/tests/Makefile
tests/tools/perfdhcp/templates/Makefile
tests/tools/perfdhcp/tests/testdata/Makefile
dns++.pc
])
AC_OUTPUT([doc/version.ent
......
......@@ -32,7 +32,7 @@ import sys
#
# Example:
# new_rdata_factory_users = [('a', 'in'), ('a', 'ch'), ('soa', 'generic')]
new_rdata_factory_users = []
new_rdata_factory_users = [('aaaa', 'in')]
re_typecode = re.compile('([\da-z]+)_(\d+)')
classcode2txt = {}
......@@ -126,6 +126,9 @@ class AbstractMessageRenderer;\n\n'''
explicit ''' + type_utxt + '''(const std::string& type_str);
''' + type_utxt + '''(isc::util::InputBuffer& buffer, size_t rdata_len);
''' + type_utxt + '''(const ''' + type_utxt + '''& other);
''' + type_utxt + '''(
MasterLexer& lexer, const Name* name,
MasterLoader::Options options, MasterLoaderCallbacks& callbacks);
virtual std::string toText() const;
virtual void toWire(isc::util::OutputBuffer& buffer) const;
virtual void toWire(AbstractMessageRenderer& renderer) const;
......@@ -213,17 +216,33 @@ def generate_rdatadef(file, basemtime):
rdata_deffile.write(class_definitions)
rdata_deffile.close()
def generate_rdatahdr(file, declarations, basemtime):
def generate_rdatahdr(file, heading, declarations, basemtime):
if not need_generate(file, basemtime):
print('skip generating ' + file);
return
heading += '''
#ifndef DNS_RDATACLASS_H
#define DNS_RDATACLASS_H 1
#include <dns/master_loader.h>
namespace isc {
namespace dns {
class Name;
class MasterLexer;
class MasterLoaderCallbacks;
}
}
'''
declarations += '''
#endif // DNS_RDATACLASS_H
// Local Variables:
// mode: c++
// End:
'''
rdata_header = open(file, 'w')
rdata_header.write(heading_txt)
rdata_header.write(heading)
rdata_header.write(declarations)
rdata_header.close()
......@@ -320,8 +339,8 @@ if __name__ == "__main__":
try:
import_definitions(classcode2txt, typecode2txt, typeandclass)
generate_rdatadef('@builddir@/rdataclass.cc', rdatadef_mtime)
generate_rdatahdr('@builddir@/rdataclass.h', rdata_declarations,
rdatahdr_mtime)
generate_rdatahdr('@builddir@/rdataclass.h', heading_txt,
rdata_declarations, rdatahdr_mtime)
generate_typeclasscode('rrtype', rdatahdr_mtime, typecode2txt, 'Type')
generate_typeclasscode('rrclass', classdir_mtime,
classcode2txt, 'Class')
......
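With ('aaaa', 'in') added to new_rdata_factory_users, IN/AAAA is presumably registered as a user of the new rdata factory, and the template change above appears to add the MasterLexer-based constructor to the common member declarations emitted for each generated Rdata class. As a point of reference, a sketch of roughly what that template expands to for type_utxt == "AAAA" (class wrapper, namespace boilerplate and the remaining members are left out):

// Illustrative expansion of the declaration template above for
// type_utxt == "AAAA"; the generated rdataclass.h wraps these in the class
// definition and the header boilerplate built by generate_rdatahdr().
explicit AAAA(const std::string& type_str);
AAAA(isc::util::InputBuffer& buffer, size_t rdata_len);
AAAA(const AAAA& other);
AAAA(
    MasterLexer& lexer, const Name* name,
    MasterLoader::Options options, MasterLoaderCallbacks& callbacks);
virtual std::string toText() const;
virtual void toWire(isc::util::OutputBuffer& buffer) const;
virtual void toWire(AbstractMessageRenderer& renderer) const;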
......@@ -36,7 +36,7 @@ using namespace master_lexer_internal;
struct MasterLexer::MasterLexerImpl {
MasterLexerImpl() : source_(NULL), token_(Token::NOT_STARTED),
MasterLexerImpl() : source_(NULL), token_(MasterToken::NOT_STARTED),
paren_count_(0), last_was_eol_(false),
has_previous_(false),
previous_paren_count_(0),
......@@ -82,7 +82,7 @@ struct MasterLexer::MasterLexerImpl {
std::vector<InputSourcePtr> sources_;
InputSource* source_; // current source (NULL if sources_ is empty)
Token token_; // currently recognized token (set by a state)
MasterToken token_; // currently recognized token (set by a state)
std::vector<char> data_; // placeholder for string data
// These are used in states, and defined here only as a placeholder.
......@@ -165,9 +165,8 @@ MasterLexer::getSourceLine() const {
return (impl_->sources_.back()->getCurrentLine());
}
const MasterLexer::Token&
const MasterToken&
MasterLexer::getNextToken(Options options) {
// If the source is not available
if (impl_->source_ == NULL) {
isc_throw(isc::InvalidOperation, "No source to read tokens from");
}
......@@ -178,7 +177,7 @@ MasterLexer::getNextToken(Options options) {
impl_->has_previous_ = true;
// Reset the token now. This is to check a token was actually produced.
// This is debugging aid.
impl_->token_ = Token(Token::NO_TOKEN_PRODUCED);
impl_->token_ = MasterToken(MasterToken::NO_TOKEN_PRODUCED);
// And get the token
// This actually handles EOF internally too.
......@@ -188,8 +187,62 @@ MasterLexer::getNextToken(Options options) {
}
// Make sure a token was produced. Since this Can Not Happen, we assert
// here instead of throwing.
assert(impl_->token_.getType() != Token::ERROR ||
impl_->token_.getErrorCode() != Token::NO_TOKEN_PRODUCED);
assert(impl_->token_.getType() != MasterToken::ERROR ||
impl_->token_.getErrorCode() != MasterToken::NO_TOKEN_PRODUCED);
return (impl_->token_);
}
namespace {
inline MasterLexer::Options
optionsForTokenType(MasterToken::Type expect) {
switch (expect) {
case MasterToken::STRING:
return (MasterLexer::NONE);
case MasterToken::QSTRING:
return (MasterLexer::QSTRING);
case MasterToken::NUMBER:
return (MasterLexer::NUMBER);
default:
isc_throw(InvalidParameter,
"expected type for getNextToken not supported: " << expect);
}
}
}
const MasterToken&
MasterLexer::getNextToken(MasterToken::Type expect, bool eol_ok) {
// Get the next token, specifying an appropriate option corresponding to
// the expected type. The result should be set in impl_->token_.
getNextToken(optionsForTokenType(expect));
if (impl_->token_.getType() == MasterToken::ERROR) {
if (impl_->token_.getErrorCode() == MasterToken::NUMBER_OUT_OF_RANGE) {
ungetToken();
}
throw LexerError(__FILE__, __LINE__, impl_->token_);
}
const bool is_eol_like =
(impl_->token_.getType() == MasterToken::END_OF_LINE ||
impl_->token_.getType() == MasterToken::END_OF_FILE);
if (eol_ok && is_eol_like) {
return (impl_->token_);
}
if (impl_->token_.getType() == MasterToken::STRING &&
expect == MasterToken::QSTRING) {
return (impl_->token_);
}
if (impl_->token_.getType() != expect) {
ungetToken();
if (is_eol_like) {
throw LexerError(__FILE__, __LINE__,
MasterToken(MasterToken::UNEXPECTED_END));
}
assert(expect == MasterToken::NUMBER);
throw LexerError(__FILE__, __LINE__,
MasterToken(MasterToken::BAD_NUMBER));
}
return (impl_->token_);
}
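For illustration, a hedged sketch of how a caller is expected to use the new expectation-based overload; the readNumber helper is hypothetical, and MasterToken::getNumber() is assumed from the token interface (its header is not shown in this diff):

#include <exceptions/exceptions.h>

#include <dns/master_lexer.h>

#include <stdint.h>

using isc::dns::MasterLexer;
using isc::dns::MasterToken;

// Hypothetical caller: read exactly one NUMBER token, converting lexer
// errors into an exception for the caller.
uint32_t
readNumber(MasterLexer& lexer) {
    try {
        // Throws MasterLexer::LexerError if the next token is not a valid
        // number (BAD_NUMBER) or does not fit in 32 bits
        // (NUMBER_OUT_OF_RANGE); in those two cases the offending token
        // has already been pushed back via ungetToken() above.
        const MasterToken& token =
            lexer.getNextToken(MasterToken::NUMBER, false);
        return (token.getNumber());
    } catch (const MasterLexer::LexerError& error) {
        isc_throw(isc::InvalidParameter,
                  "expected a number: " << error.token_.getErrorText());
    }
}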
......@@ -212,16 +265,17 @@ const char* const error_text[] = {
"unexpected end of input", // UNEXPECTED_END
"unbalanced quotes", // UNBALANCED_QUOTES
"no token produced", // NO_TOKEN_PRODUCED
"number out of range" // NUMBER_OUT_OF_RANGE
"number out of range", // NUMBER_OUT_OF_RANGE
"not a valid number" // BAD_NUMBER
};
const size_t error_text_max_count = sizeof(error_text) / sizeof(error_text[0]);
} // end unnamed namespace
std::string
MasterLexer::Token::getErrorText() const {
MasterToken::getErrorText() const {
if (type_ != ERROR) {
isc_throw(InvalidOperation,
"Token::getErrorText() for non error type");
"MasterToken::getErrorText() for non error type");
}
// The class integrity ensures the following:
......@@ -234,14 +288,12 @@ namespace master_lexer_internal {
// Note that these need to be defined here so that they can refer to
// the details of MasterLexerImpl.
typedef MasterLexer::Token Token; // convenience shortcut
bool
State::wasLastEOL(const MasterLexer& lexer) const {
return (lexer.impl_->last_was_eol_);
}
const MasterLexer::Token&
const MasterToken&
State::getToken(const MasterLexer& lexer) const {
return (lexer.impl_->token_);
}
......@@ -271,7 +323,7 @@ public:
if (c != '\n') {
getLexerImpl(lexer)->source_->ungetChar();
}
getLexerImpl(lexer)->token_ = Token(Token::END_OF_LINE);
getLexerImpl(lexer)->token_ = MasterToken(MasterToken::END_OF_LINE);
getLexerImpl(lexer)->last_was_eol_ = true;
}
};
......@@ -342,24 +394,24 @@ State::start(MasterLexer& lexer, MasterLexer::Options options) {
if (c == InputSource::END_OF_STREAM) {
lexerimpl.last_was_eol_ = false;
if (paren_count != 0) {
lexerimpl.token_ = Token(Token::UNBALANCED_PAREN);
lexerimpl.token_ = MasterToken(MasterToken::UNBALANCED_PAREN);
paren_count = 0; // reset to 0; this helps in lenient mode.
return (NULL);
}
lexerimpl.token_ = Token(Token::END_OF_FILE);
lexerimpl.token_ = MasterToken(MasterToken::END_OF_FILE);
return (NULL);
} else if (c == ' ' || c == '\t') {
// If requested and we are not in (), recognize the initial space.
if (lexerimpl.last_was_eol_ && paren_count == 0 &&
(options & MasterLexer::INITIAL_WS) != 0) {
lexerimpl.last_was_eol_ = false;
lexerimpl.token_ = Token(Token::INITIAL_WS);
lexerimpl.token_ = MasterToken(MasterToken::INITIAL_WS);
return (NULL);
}
} else if (c == '\n') {
lexerimpl.last_was_eol_ = true;
if (paren_count == 0) { // we don't recognize EOL if we are in ()
lexerimpl.token_ = Token(Token::END_OF_LINE);
lexerimpl.token_ = MasterToken(MasterToken::END_OF_LINE);
return (NULL);
}
} else if (c == '\r') {
......@@ -375,7 +427,7 @@ State::start(MasterLexer& lexer, MasterLexer::Options options) {
} else if (c == ')') {
lexerimpl.last_was_eol_ = false;
if (paren_count == 0) {
lexerimpl.token_ = Token(Token::UNBALANCED_PAREN);
lexerimpl.token_ = MasterToken(MasterToken::UNBALANCED_PAREN);
return (NULL);
}
--paren_count;
......@@ -406,8 +458,11 @@ String::handle(MasterLexer& lexer) const {
if (getLexerImpl(lexer)->isTokenEnd(c, escaped)) {
getLexerImpl(lexer)->source_->ungetChar();
// make sure it nul-terminated as a c-str (excluded from token
// data).
data.push_back('\0');
getLexerImpl(lexer)->token_ =
MasterLexer::Token(&data.at(0), data.size());
MasterToken(&data.at(0), data.size() - 1);
return;
}
escaped = (c == '\\' && !escaped);
......@@ -417,7 +472,7 @@ String::handle(MasterLexer& lexer) const {
void
QString::handle(MasterLexer& lexer) const {
MasterLexer::Token& token = getLexerImpl(lexer)->token_;
MasterToken& token = getLexerImpl(lexer)->token_;
std::vector<char>& data = getLexerImpl(lexer)->data_;
data.clear();
......@@ -425,7 +480,7 @@ QString::handle(MasterLexer& lexer) const {
while (true) {
const int c = getLexerImpl(lexer)->source_->getChar();
if (c == InputSource::END_OF_STREAM) {
token = Token(Token::UNEXPECTED_END);
token = MasterToken(MasterToken::UNEXPECTED_END);
return;
} else if (c == '"') {
if (escaped) {
......@@ -434,12 +489,15 @@ QString::handle(MasterLexer& lexer) const {
escaped = false;
data.back() = '"';
} else {
token = MasterLexer::Token(&data.at(0), data.size(), true);
// make sure it nul-terminated as a c-str (excluded from token
// data). This also simplifies the case of an empty string.
data.push_back('\0');
token = MasterToken(&data.at(0), data.size() - 1, true);
return;
}
} else if (c == '\n' && !escaped) {
getLexerImpl(lexer)->source_->ungetChar();
token = Token(Token::UNBALANCED_QUOTES);
token = MasterToken(MasterToken::UNBALANCED_QUOTES);
return;
} else {
escaped = (c == '\\' && !escaped);
......@@ -450,7 +508,7 @@ QString::handle(MasterLexer& lexer) const {
void
Number::handle(MasterLexer& lexer) const {
MasterLexer::Token& token = getLexerImpl(lexer)->token_;
MasterToken& token = getLexerImpl(lexer)->token_;
// It may yet turn out to be a string, so we first
// collect all the data
......@@ -464,21 +522,21 @@ Number::handle(MasterLexer& lexer) const {
getLexerImpl(lexer)->source_->getChar(), escaped);
if (getLexerImpl(lexer)->isTokenEnd(c, escaped)) {
getLexerImpl(lexer)->source_->ungetChar();
// We need to close the string whether it's digits-only (for
// lexical_cast) or not (see String::handle()).
data.push_back('\0');
if (digits_only) {
// Close the string for lexical_cast
data.push_back('\0');
try {
const uint32_t number32 =
boost::lexical_cast<uint32_t, const char*>(&data[0]);
token = MasterLexer::Token(number32);
token = MasterToken(number32);
} catch (const boost::bad_lexical_cast&) {
// Since we already know we have only digits,
// range should be the only possible problem.
token = Token(Token::NUMBER_OUT_OF_RANGE);
token = MasterToken(MasterToken::NUMBER_OUT_OF_RANGE);
}
} else {
token = MasterLexer::Token(&data.at(0),
data.size());
token = MasterToken(&data.at(0), data.size() - 1);
}
return;
}
......
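One behavioral detail of the String, QString and Number changes above is worth spelling out: the collected token data is now nul-terminated, while the length recorded in the token excludes the terminator. A small sketch of what a consumer may now rely on (StringRegion and its beg/len members are assumed from the MasterToken interface, which is not shown in this diff):

#include <dns/master_lexer.h>

#include <string>

using isc::dns::MasterToken;

// Given a STRING or QSTRING token produced by the states above:
std::string
stringTokenToText(const MasterToken& token) {
    const MasterToken::StringRegion& region = token.getStringRegion();
    // region.beg now points at nul-terminated data (the '\0' appended in
    // String::handle() / QString::handle()), so it can also be handed
    // directly to C APIs such as inet_pton(); region.len still excludes
    // the terminator, matching the data.size() - 1 passed to the token.
    return (std::string(region.beg, region.len));
}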
......@@ -43,10 +43,10 @@ namespace master_lexer_internal {
/// state, so it makes more sense to separate the interface for the transition
/// from the initial state.
///
/// When an object of a specific state class completes the session, it
/// normally sets the identified token in the lexer, and returns NULL;
/// if more transition is necessary, it returns a pointer to the next state
/// object.
/// If the whole lexer transition is completed within start(), it sets the
/// identified token and returns NULL; otherwise it returns a pointer to
/// an object of a specific state class that completes the session
/// on the call of handle().
///
/// As is usual in the state design pattern, the \c State class is made
/// a friend class of \c MasterLexer and can refer to its internal details.
......@@ -119,7 +119,7 @@ public:
/// purposes.
///@{
bool wasLastEOL(const MasterLexer& lexer) const;
const MasterLexer::Token& getToken(const MasterLexer& lexer) const;
const MasterToken& getToken(const MasterLexer& lexer) const;
size_t getParenCount(const MasterLexer& lexer) const;
///@}
......
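The rewritten comment above is easiest to read next to the logic that drives the states. A simplified sketch of the corresponding part of MasterLexer::getNextToken(), paraphrased from that comment rather than copied from the implementation:

// Inside MasterLexer::getNextToken(Options options), after the previous
// token has been saved and impl_->token_ has been reset:
const State* state = State::start(*this, options);
if (state != NULL) {
    // start() could not finish the recognition on its own; the returned
    // state object completes it and stores the result in impl_->token_.
    state->handle(*this);
}
// Either way impl_->token_ now holds the recognized token (or an error),
// which is what the assertion in getNextToken() verifies before returning.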
......@@ -12,6 +12,20 @@
// OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
// PERFORMANCE OF THIS SOFTWARE.
#include <exceptions/exceptions.h>
#include <util/buffer.h>
#include <dns/name.h>
#include <dns/messagerenderer.h>
#include <dns/master_lexer.h>
#include <dns/rdata.h>
#include <dns/rrparamregistry.h>
#include <dns/rrtype.h>
#include <boost/lexical_cast.hpp>
#include <boost/shared_ptr.hpp>
#include <algorithm>
#include <cctype>
#include <string>
......@@ -24,16 +38,6 @@
#include <stdint.h>
#include <string.h>
#include <boost/lexical_cast.hpp>
#include <boost/shared_ptr.hpp>
#include <util/buffer.h>
#include <dns/name.h>
#include <dns/messagerenderer.h>
#include <dns/rdata.h>
#include <dns/rrparamregistry.h>
#include <dns/rrtype.h>
using namespace std;
using boost::lexical_cast;
using namespace isc::util;
......@@ -81,23 +85,92 @@ createRdata(const RRType& rrtype, const RRClass& rrclass, const Rdata& source)
source));
}
namespace {
void
fromtextError(bool& error_issued, const MasterLexer& lexer,
MasterLoaderCallbacks& callbacks,
const MasterToken* token, const char* reason)
{
// Don't be too noisy if there are many issues for single RDATA
if (error_issued) {
return;
}
error_issued = true;
if (token == NULL) {
callbacks.error(lexer.getSourceName(), lexer.getSourceLine(),
"createRdata from text failed: " + string(reason));
return;
}
switch (token->getType()) {
case MasterToken::STRING:
case MasterToken::QSTRING:
callbacks.error(lexer.getSourceName(), lexer.getSourceLine(),
"createRdata from text failed near '" +
token->getString() + "': " + string(reason));
break;
case MasterToken::ERROR:
callbacks.error(lexer.getSourceName(), lexer.getSourceLine(),
"createRdata from text failed: " +
token->getErrorText());
break;
default:
// This case shouldn't happen based on how we use MasterLexer in
// createRdata(), so we could assert() that here. But since it
// depends on detailed behavior of other classes, we treat the case
// in a bit less harsh way.
isc_throw(Unexpected, "bug: createRdata() saw unexpected token type");
}
}
}
RdataPtr
createRdata(const RRType& rrtype, const RRClass& rrclass,
MasterLexer& lexer, const Name* origin,
MasterLoader::Options options,
MasterLoaderCallbacks& callbacks)
{
RdataPtr ret;
RdataPtr rdata;
bool error_issued = false;
try {
ret = RRParamRegistry::getRegistry().createRdata(rrtype, rrclass,
lexer, origin,
options, callbacks);
} catch (...) {
// ret is NULL here.
rdata = RRParamRegistry::getRegistry().createRdata(
rrtype, rrclass, lexer, origin, options, callbacks);
} catch (const MasterLexer::LexerError& error) {
fromtextError(error_issued, lexer, callbacks, &error.token_, "");
} catch (const Exception& ex) {
// Catching all isc::Exception is too broad, but right now we don't
// have better granularity. When we complete #2518 we can make this
// finer.
fromtextError(error_issued, lexer, callbacks, NULL, ex.what());
}
// Other exceptions mean a serious implementation bug or fatal system
// error; it doesn't make sense to catch and try to recover from them
// here. Just propagate.
// Consume to end of line / file.
// Call callback via fromtextError once if there was an error.
do {
const MasterToken& token = lexer.getNextToken();
switch (token.getType()) {
case MasterToken::END_OF_LINE:
return (rdata);
case MasterToken::END_OF_FILE:
callbacks.warning(lexer.getSourceName(), lexer.getSourceLine(),
"file does not end with newline");
return (rdata);
default:
rdata.reset(); // we'll return NULL
fromtextError(error_issued, lexer, callbacks, &token,
"extra input text");
// Continue until we see EOL or EOF
}
} while (true);
return (ret);
// We shouldn't reach here
assert(false);
return (RdataPtr()); // add explicit return to silence some compilers
}
int
......@@ -211,9 +284,10 @@ Generic::Generic(MasterLexer& lexer, const Name*,
std::string s;
while (true) {
const MasterLexer::Token& token = lexer.getNextToken();
if ((token.getType() == MasterLexer::Token::END_OF_FILE) ||
(token.getType() == MasterLexer::Token::END_OF_LINE)) {
const MasterToken& token = lexer.getNextToken();
if ((token.getType() == MasterToken::END_OF_FILE) ||
(token.getType() == MasterToken::END_OF_LINE)) {
lexer.ungetToken(); // let the upper layer handle the end-of token
break;
}
......
......@@ -485,8 +485,47 @@ RdataPtr createRdata(const RRType& rrtype, const RRClass& rrclass,
RdataPtr createRdata(const RRType& rrtype, const RRClass& rrclass,
const Rdata& source);
/// \brief Create RDATA of a given pair of RR type and class from the
/// \brief Create RDATA of a given pair of RR type and class using the
/// master lexer.
///
/// This is a more generic form of factory from textual RDATA, and is mainly
/// intended to be used internally by the master file parser (\c MasterLoader)
/// of this library.
///
/// The \c lexer is expected to be at the beginning of textual RDATA of the
/// specified type and class. This function (and its underlying Rdata
/// implementations) extracts necessary tokens from the lexer and constructs
/// the RDATA from them.
///
/// Due to the intended usage of this version, this function handles error
/// cases quite differently from other versions. It internally catches
/// most of syntax and semantics errors of the input (reported as exceptions),
/// calls the corresponding callback specified by the \c callbacks parameters,
/// and returns a NULL smart pointer. If the caller rather wants to get
/// an exception in these cases, it can pass a callback that internally
/// throws on error. Some critical exceptions such as \c std::bad_alloc are
/// still propagated to the upper layer as it doesn't make sense to try
/// recovery from such a situation within this function.
///
/// Whether or not the creation succeeds, this function updates the lexer
/// until it reaches either the end of line or file, starting from the end of
/// the RDATA text (or the point of failure if the parsing fails in the
/// middle of it). The caller can therefore assume it's ready for reading
/// the next data (which is normally a subsequent RR in the zone file) on
/// return, whether or not this function succeeds.
///
/// \param rrtype An \c RRType object specifying the type/class pair.
/// \param rrclass An \c RRClass object specifying the type/class pair.
/// \param lexer A \c MasterLexer object parsing a master file for the
/// RDATA to be created
/// \param origin If non NULL, specifies the origin of any domain name fields
/// of the RDATA that are non absolute.
/// \param options Master loader options controlling how to deal with errors
/// or non critical issues in the parsed RDATA.
/// \param callbacks Callback to be called when an error or non critical issue
/// is found.
/// \return An \c RdataPtr object pointing to the created
/// \c Rdata object. Will be NULL if parsing fails.
RdataPtr createRdata(const RRType& rrtype, const RRClass& rrclass,
MasterLexer& lexer, const Name* origin,
MasterLoader::Options options,
......
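To make the documented error contract concrete, a hedged caller sketch; the loadOneRdata helper and the exact include set are illustrative, not part of the library:

#include <dns/master_lexer.h>
#include <dns/master_loader.h>
#include <dns/name.h>
#include <dns/rdata.h>
#include <dns/rrclass.h>
#include <dns/rrtype.h>

using namespace isc::dns;

// Hypothetical caller: create one RDATA and rely on the contract
// described above.
rdata::RdataPtr
loadOneRdata(const RRType& rrtype, const RRClass& rrclass,
             MasterLexer& lexer, const Name& origin,
             MasterLoader::Options options,
             MasterLoaderCallbacks& callbacks)
{
    // Syntax/semantic errors are reported through 'callbacks' and yield a
    // NULL pointer; either way the lexer has already been advanced to the
    // end of the current line (or file), so the caller can move straight
    // on to the next RR.
    const rdata::RdataPtr rdata =
        rdata::createRdata(rrtype, rrclass, lexer, &origin, options,
                           callbacks);
    if (!rdata) {
        // NULL means the text was rejected; decide here whether to skip
        // this record or abort the load.
    }
    return (rdata);
}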
......@@ -12,6 +12,15 @@
// OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
// PERFORMANCE OF THIS SOFTWARE.
#include <exceptions/exceptions.h>
#include <util/buffer.h>
#include <dns/exceptions.h>
#include <dns/messagerenderer.h>
#include <dns/rdata.h>
#include <dns/rdataclass.h>
#include <dns/master_lexer.h>
#include <dns/master_loader.h>
#include <stdint.h>
#include <string.h>
......@@ -20,14 +29,6 @@
#include <arpa/inet.h> // XXX: for inet_pton/ntop(), not exist in C++ standards
#include <sys/socket.h> // for AF_INET/AF_INET6
#include <exceptions/exceptions.h>
#include <util/buffer.h>
#include <dns/exceptions.h>
#include <dns/messagerenderer.h>
#include <dns/rdata.h>
#include <dns/rdataclass.h>
using namespace std;
using namespace isc::util;
......@@ -42,6 +43,16 @@ AAAA::AAAA(const std::string& addrstr) {
}
}
AAAA::AAAA(MasterLexer& lexer, const Name*,
MasterLoader::Options, MasterLoaderCallbacks&)
{
const MasterToken& token = lexer.getNextToken(MasterToken::STRING);
if (inet_pton(AF_INET6, token.getStringRegion().beg, &addr_) != 1) {
isc_throw(InvalidRdataText, "Failed to convert '"
<< token.getString() << "' to IN/AAAA RDATA");
}
}
AAAA::AAAA(InputBuffer& buffer, size_t rdata_len) {
if (rdata_len != sizeof(addr_)) {
isc_throw(DNSMessageFORMERR,
......
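As a usage reference for the new constructor: given a lexer positioned at the address text it is expected to yield the same RDATA as the existing string constructor, and to throw InvalidRdataText for a malformed address. A hedged sketch; the parseAAAA helper is hypothetical and MasterLoader::DEFAULT is assumed to be the "no special options" value:

#include <dns/master_lexer.h>
#include <dns/master_loader.h>
#include <dns/rdataclass.h>

using namespace isc::dns;
using namespace isc::dns::rdata;

// Hypothetical helper: build an IN/AAAA RDATA from a lexer positioned at
// the address text (e.g. "2001:db8::1").  AAAA ignores the origin name,
// so NULL is passed for it.
in::AAAA
parseAAAA(MasterLexer& lexer, MasterLoaderCallbacks& callbacks) {
    return (in::AAAA(lexer, NULL, MasterLoader::DEFAULT, callbacks));
}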
......@@ -34,6 +34,11 @@ using namespace isc::util;
// If you added member functions specific to this derived class, you'll need
// to implement them here, of course.