Commit d99048aa
authored Feb 10, 2017 by Tomek Mrugalski
[master] Merge branch 'trac5076' (flex/bison for control agent)
parents ec8cb347 13db3cbe
Changes: 29
doc/Makefile.am
@@ -6,7 +6,8 @@ EXTRA_DIST += devel/contribute.dox
 EXTRA_DIST += devel/mainpage.dox
 EXTRA_DIST += devel/unit-tests.dox
-nobase_dist_doc_DATA  = examples/ddns/sample1.json
+nobase_dist_doc_DATA  = examples/agent/simple.json
+nobase_dist_doc_DATA += examples/ddns/sample1.json
 nobase_dist_doc_DATA += examples/ddns/template.json
 nobase_dist_doc_DATA += examples/kea4/advanced.json
 nobase_dist_doc_DATA += examples/kea4/backends.json
doc/examples/agent/simple.json
0 → 100644
// This is a simple example of a configuration for Control-Agent (CA) or simply
// Agent. This server provides RESTful interface for all Kea servers.
{
    "Control-agent":
    {
        // We need to specify where the agent should listen to incoming HTTP
        // queries. Note that agent does not provide SSL or TLS protection
        // on its own, so limiting the traffic to localhost is a good idea.
        "http-host": "localhost",

        // Another mandatory parameter is the HTTP port.
        "http-port": 8000,

        // This map specifies where control channel of each server is configured
        // to listen on. See 'control-socket' object in the respective
        // servers. At this time the only supported socket type is "unix".
        // Make sure that the Agent and respective servers configuration
        // matches exactly, otherwise they won't be able to communicate.
        "control-sockets":
        {
            // This is how the Agent can communicate with the DHCPv4 server.
            "dhcp4-server":
            {
                "socket-type": "unix",
                "socket-name": "/path/to/the/unix/socket-v4"
            },

            // Location of the DHCPv6 command channel socket.
            "dhcp6-server":
            {
                "socket-type": "unix",
                "socket-name": "/path/to/the/unix/socket-v6"
            },

            // Currently DHCP-DDNS (nicknamed D2) does not support
            // command channel yet, but we hope this will change in the
            // future.
            "d2-server":
            {
                "socket-type": "unix",
                "socket-name": "/path/to/the/unix/socket-d2"
            }
        },

        // CA is able to load hook libraries that augment its operation.
        // The primary functionality is the ability to add new commands.
        "hooks-libraries": [
            // Hook libraries list may contain more than one library.
            {
                // The only necessary parameter is the library filename.
                "library": "/opt/local/control-agent-commands.so",

                // Some libraries may support parameters. Make sure you
                // type this section carefully, as the CA does not validate
                // it (because the format is library specific).
                "parameters": {
                    "param1": "foo"
                }
            }
        ]
    },

    // Similar to other Kea components, CA also uses logging.
    "Logging":
    {
        "loggers": [
            {
                "name": "kea-ctrl-agent",
                "output_options": [
                    {
                        "output": "/var/log/kea-ctrl-agent.log"
                    }
                ],
                "severity": "INFO",
                "debuglevel": 0
            }
        ]
    }
}
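With the agent configured as above and listening on localhost:8000, commands can be submitted to it over plain HTTP. The snippet below is only a sketch, assuming the generic Kea control-channel command format ({ "command": ... }) and the list-commands command; the exact set of supported commands depends on the Kea version and on any hook libraries loaded.

# Hypothetical query against the agent configured in simple.json above.
# Assumes the generic Kea command syntax; adjust to what your version supports.
curl -X POST http://localhost:8000/ \
     -H "Content-Type: application/json" \
     -d '{ "command": "list-commands" }'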
src/bin/agent/Makefile.am
@@ -46,6 +46,9 @@ libagent_la_SOURCES = ctrl_agent_cfg_mgr.cc ctrl_agent_cfg_mgr.h
 libagent_la_SOURCES += ctrl_agent_controller.cc ctrl_agent_controller.h
 libagent_la_SOURCES += ctrl_agent_log.cc ctrl_agent_log.h
 libagent_la_SOURCES += ctrl_agent_process.cc ctrl_agent_process.h
+libagent_la_SOURCES += agent_parser.cc agent_parser.h
+libagent_la_SOURCES += parser_context.cc parser_context.h parser_context_decl.h
+libagent_la_SOURCES += agent_lexer.ll
 nodist_libagent_la_SOURCES = ctrl_agent_messages.h ctrl_agent_messages.cc
 EXTRA_DIST += ctrl_agent_messages.mes
@@ -76,3 +79,31 @@ kea_ctrl_agent_LDADD += $(LOG4CPLUS_LIBS) $(CRYPTO_LIBS) $(BOOST_LIBS)
 kea_ctrl_agent_LDFLAGS = $(AM_LDFLAGS) $(CRYPTO_LDFLAGS)
+
+if GENERATE_PARSER
+
+parser: agent_lexer.cc location.hh position.hh stack.hh agent_parser.cc agent_parser.h
+        @echo "Flex/bison files regenerated"
+
+# --- Flex/Bison stuff below --------------------------------------------------
+# When debugging grammar issues, it's useful to add -v to bison parameters.
+# bison will generate parser.output file that explains the whole grammar.
+# It can be used to manually follow what's going on in the parser.
+# This is especially useful if yydebug_ is set to 1 as that variable
+# will cause parser to print out its internal state.
+# Call flex with -s to check that the default rule can be suppressed
+# Call bison with -W to get warnings like unmarked empty rules
+# Note C++11 deprecated register still used by flex < 2.6.0
+
+location.hh position.hh stack.hh agent_parser.cc agent_parser.h: agent_parser.yy
+        $(YACC) --defines=agent_parser.h --report=all --report-file=agent_parser.report -o agent_parser.cc agent_parser.yy
+
+agent_lexer.cc: agent_lexer.ll
+        $(LEX) --prefix agent_ -o agent_lexer.cc agent_lexer.ll
+
+else
+
+parser location.hh position.hh stack.hh agent_parser.cc agent_parser.h agent_lexer.cc:
+        @echo Parser generation disabled. Configure with --enable-generate-parser to enable it.
+
+endif
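For reference, regenerating the flex/bison output from a source checkout could look roughly like the sequence below. This is a sketch that assumes autotools, flex and bison are installed; it relies on the --enable-generate-parser configure switch and the parser target defined in the Makefile fragment above.

# Sketch: regenerate the agent lexer/parser from agent_lexer.ll and agent_parser.yy.
# Assumes autotools, flex and bison are available in the build environment.
autoreconf --install
./configure --enable-generate-parser
make -C src/bin/agent parser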
src/bin/agent/agent_lexer.cc
0 → 100644
This diff is collapsed.
src/bin/agent/agent_lexer.ll
0 → 100644
/* Copyright (C) 2017 Internet Systems Consortium, Inc. ("ISC")

   This Source Code Form is subject to the terms of the Mozilla Public
   License, v. 2.0. If a copy of the MPL was not distributed with this
   file, You can obtain one at http://mozilla.org/MPL/2.0/. */

%{ /* -*- C++ -*- */

#include <cerrno>
#include <climits>
#include <cstdlib>
#include <string>

#include <agent/parser_context.h>
#include <asiolink/io_address.h>
#include <boost/lexical_cast.hpp>
#include <exceptions/exceptions.h>
#include <cc/dhcp_config_error.h>

// Work around an incompatibility in flex (at least versions
// 2.5.31 through 2.5.33): it generates code that does
// not conform to C89.  See Debian bug 333231
// <http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=333231>.
#undef yywrap
#define yywrap() 1

namespace {

bool start_token_flag = false;

isc::agent::ParserContext::ParserType start_token_value;
unsigned int comment_start_line = 0;

using namespace isc;
using isc::agent::AgentParser;

};

// To avoid the call to exit... oops!
#define YY_FATAL_ERROR(msg) isc::agent::ParserContext::fatal(msg)
%}
/* noyywrap disables automatic rewinding for the next file to parse. Since we
   always parse only a single string, there's no need to do any wraps. And
   using yywrap requires linking with -lfl, which provides the default yywrap
   implementation that always returns 1 anyway. */
%option noyywrap

/* nounput simplifies the lexer, by removing support for putting a character
   back into the input stream. We never use such capability anyway. */
%option nounput

/* batch means that we'll never use the generated lexer interactively. */
%option batch

/* avoid to get static global variables to remain with C++. */
/* in last resort %option reentrant */

/* Enables debug mode. To see the debug messages, one needs to also set
   yy_flex_debug to 1, then the debug messages will be printed on stderr. */
%option debug

/* I have no idea what this option does, except it was specified in the bison
   examples and Postgres folks added it to remove gcc 4.3 warnings. Let's
   be on the safe side and keep it. */
%option noinput

%x COMMENT
%x DIR_ENTER DIR_INCLUDE DIR_EXIT
/* These are not token expressions yet, just convenience expressions that
   can be used during actual token definitions. Note some can match
   incorrect inputs (e.g., IP addresses) which must be checked. */
int \-?[0-9]+
blank [ \t\r]

UnicodeEscapeSequence           u[0-9A-Fa-f]{4}
JSONEscapeCharacter             ["\\/bfnrt]
JSONEscapeSequence              {JSONEscapeCharacter}|{UnicodeEscapeSequence}
JSONStandardCharacter           [^\x00-\x1f"\\]
JSONStringCharacter             {JSONStandardCharacter}|\\{JSONEscapeSequence}
JSONString                      \"{JSONStringCharacter}*\"

/* for errors */

BadUnicodeEscapeSequence        u[0-9A-Fa-f]{0,3}[^0-9A-Fa-f]
BadJSONEscapeSequence           [^"\\/bfnrtu]|{BadUnicodeEscapeSequence}
ControlCharacter                [\x00-\x1f]
ControlCharacterFill            [^"\\]|\\{JSONEscapeSequence}
%{
// This code run each time a pattern is matched. It updates the location
// by moving it ahead by yyleng bytes. yyleng specifies the length of the
// currently matched token.
#define YY_USER_ACTION  driver.loc_.columns(yyleng);
%}

%%
%{
    // This part of the code is copied over to the verbatim to the top
    // of the generated yylex function. Explanation:
    // http://www.gnu.org/software/bison/manual/html_node/Multiple-start_002dsymbols.html

    // Code run each time yylex is called.
    driver.loc_.step();

    // We currently have 3 points of entries defined:
    // START_JSON - which expects any valid JSON
    // START_AGENT - which expects full configuration (with outer map and Control-agent
    //               object in it.
    // START_SUB_AGENT - which expects only content of the Control-agent, this is
    //                   primarily useful for testing.
    if (start_token_flag) {
        start_token_flag = false;
        switch (start_token_value) {
        case ParserContext::PARSER_JSON:
        default:
            return isc::agent::AgentParser::make_START_JSON(driver.loc_);
        case ParserContext::PARSER_AGENT:
            return isc::agent::AgentParser::make_START_AGENT(driver.loc_);
        case ParserContext::PARSER_SUB_AGENT:
            return isc::agent::AgentParser::make_START_SUB_AGENT(driver.loc_);
        }
    }
%}
#.* ;

"//"(.*) ;

"/*" {
  BEGIN(COMMENT);
  comment_start_line = driver.loc_.end.line;;
}

<COMMENT>"*/" BEGIN(INITIAL);
<COMMENT>. ;
<COMMENT><<EOF>> {
    isc_throw(ParseError, "Comment not closed. (/* in line " << comment_start_line);
}

"<?" BEGIN(DIR_ENTER);
<DIR_ENTER>"include" BEGIN(DIR_INCLUDE);
<DIR_INCLUDE>\"([^\"\n])+\" {
    // Include directive.

    // Extract the filename.
    std::string tmp(yytext+1);
    tmp.resize(tmp.size() - 1);

    driver.includeFile(tmp);
}
<DIR_ENTER,DIR_INCLUDE,DIR_EXIT><<EOF>> {
    isc_throw(ParseError, "Directive not closed.");
}
<DIR_EXIT>"?>" BEGIN(INITIAL);

<*>{blank}+   {
    // Ok, we found a with space. Let's ignore it and update loc variable.
    driver.loc_.step();
}

<*>[\n]+      {
    // Newline found. Let's update the location and continue.
    driver.loc_.lines(yyleng);
    driver.loc_.step();
}
\"
Control-agent\
" {
switch(driver.ctx_) {
case ParserContext::CONFIG:
return AgentParser::make_CONTROL_AGENT(driver.loc_);
default:
return AgentParser::make_STRING("
Control-agent
", driver.loc_);
}
}
\"
http-host\
" {
switch(driver.ctx_) {
case ParserContext::AGENT:
return AgentParser::make_HTTP_HOST(driver.loc_);
default:
return AgentParser::make_STRING("
http-host
", driver.loc_);
}
}
\"
http-port\
" {
switch(driver.ctx_) {
case ParserContext::AGENT:
return AgentParser::make_HTTP_PORT(driver.loc_);
default:
return AgentParser::make_STRING("
http-port
", driver.loc_);
}
}
\"
control-sockets\
" {
switch(driver.ctx_) {
case ParserContext::AGENT:
return AgentParser::make_CONTROL_SOCKETS(driver.loc_);
default:
return AgentParser::make_STRING("
control-sockets
", driver.loc_);
}
}
\"
dhcp4-server\
" {
switch(driver.ctx_) {
case ParserContext::CONTROL_SOCKETS:
return AgentParser::make_DHCP4_SERVER(driver.loc_);
default:
return AgentParser::make_STRING("
dhcp4-server
", driver.loc_);
}
}
\"
dhcp6-server\
" {
switch(driver.ctx_) {
case ParserContext::CONTROL_SOCKETS:
return AgentParser::make_DHCP6_SERVER(driver.loc_);
default:
return AgentParser::make_STRING("
dhcp6-server
", driver.loc_);
}
}
\"
d2-server\
" {
switch(driver.ctx_) {
case ParserContext::CONTROL_SOCKETS:
return AgentParser::make_D2_SERVER(driver.loc_);
default:
return AgentParser::make_STRING("
d2-server
", driver.loc_);
}
}
\"
socket-
name
\
" {
switch(driver.ctx_) {
case ParserContext::SERVER:
return AgentParser::make_SOCKET_NAME(driver.loc_);
default:
return AgentParser::make_STRING("
socket-
name
", driver.loc_);
}
}
\"
socket-
type
\
" {
switch(driver.ctx_) {
case ParserContext::SERVER:
return AgentParser::make_SOCKET_TYPE(driver.loc_);
default:
return AgentParser::make_STRING("
socket-
type
", driver.loc_);
}
}
\"
unix\
" {
switch(driver.ctx_) {
case ParserContext::SOCKET_TYPE:
return AgentParser::make_UNIX(driver.loc_);
default:
return AgentParser::make_STRING("
unix
", driver.loc_);
}
}
\"
hooks-libraries\
" {
switch(driver.ctx_) {
case ParserContext::AGENT:
return AgentParser::make_HOOKS_LIBRARIES(driver.loc_);
default:
return AgentParser::make_STRING("
hooks-libraries
", driver.loc_);
}
}
\"
library\
" {
switch(driver.ctx_) {
case ParserContext::HOOKS_LIBRARIES:
return AgentParser::make_LIBRARY(driver.loc_);
default:
return AgentParser::make_STRING("
library
", driver.loc_);
}
}
\"
parameters\
" {
switch(driver.ctx_) {
case ParserContext::HOOKS_LIBRARIES:
return AgentParser::make_PARAMETERS(driver.loc_);
default:
return AgentParser::make_STRING("
parameters
", driver.loc_);
}
}
\"
Logging\
" {
switch(driver.ctx_) {
case ParserContext::CONFIG:
return AgentParser::make_LOGGING(driver.loc_);
default:
return AgentParser::make_STRING("
Logging
", driver.loc_);
}
}
\"
loggers\
" {
switch(driver.ctx_) {
case ParserContext::LOGGING:
return AgentParser::make_LOGGERS(driver.loc_);
default:
return AgentParser::make_STRING("
loggers
", driver.loc_);
}
}
\"
name
\
" {
switch(driver.ctx_) {
case ParserContext::LOGGERS:
return AgentParser::make_NAME(driver.loc_);
default:
return AgentParser::make_STRING("
name
", driver.loc_);
}
}
\"
output_options\
" {
switch(driver.ctx_) {
case ParserContext::LOGGERS:
return AgentParser::make_OUTPUT_OPTIONS(driver.loc_);
default:
return AgentParser::make_STRING("
output_options
", driver.loc_);
}
}
\"
output\
" {
switch(driver.ctx_) {
case ParserContext::OUTPUT_OPTIONS:
return AgentParser::make_OUTPUT(driver.loc_);
default:
return AgentParser::make_STRING("
output
", driver.loc_);
}
}
\"
debuglevel\
" {
switch(driver.ctx_) {
case ParserContext::LOGGERS:
return AgentParser::make_DEBUGLEVEL(driver.loc_);
default:
return AgentParser::make_STRING("
debuglevel
", driver.loc_);
}
}
\"
severity\
" {
switch(driver.ctx_) {
case ParserContext::LOGGERS:
return AgentParser::make_SEVERITY(driver.loc_);
default:
return AgentParser::make_STRING("
severity
", driver.loc_);
}
}
\"
Dhcp4\
" {
switch(driver.ctx_) {
case ParserContext::CONFIG:
return AgentParser::make_DHCP4(driver.loc_);
default:
return AgentParser::make_STRING("
Dhcp4
", driver.loc_);
}
}
\"
Dhcp6\
" {
switch(driver.ctx_) {
case ParserContext::CONFIG:
return AgentParser::make_DHCP6(driver.loc_);
default:
return AgentParser::make_STRING("
Dhcp6
", driver.loc_);
}
}
\"
DhcpDdns\
" {
switch(driver.ctx_) {
case ParserContext::CONFIG:
return AgentParser::make_DHCPDDNS(driver.loc_);
default:
return AgentParser::make_STRING("
DhcpDdns
", driver.loc_);
}
}
{JSONString} {
    // A string has been matched. It contains the actual string and single quotes.
    // We need to get those quotes out of the way and just use its content, e.g.
    // for 'foo' we should get foo
    std::string raw(yytext+1);
    size_t len = raw.size() - 1;
    raw.resize(len);
    std::string decoded;
    decoded.reserve(len);
    for (size_t pos = 0; pos < len; ++pos) {
        int b = 0;
        char c = raw[pos];
        switch (c) {
        case '"':
            // impossible condition
            driver.error(driver.loc_, "Bad quote in \"" + raw + "\"");
        case '\\':
            ++pos;
            if (pos >= len) {
                // impossible condition
                driver.error(driver.loc_, "Overflow escape in \"" + raw + "\"");
            }
            c = raw[pos];
            switch (c) {
            case '"':
            case '\\':
            case '/':
                decoded.push_back(c);
                break;
            case 'b':
                decoded.push_back('\b');
                break;
            case 'f':
                decoded.push_back('\f');
                break;
            case 'n':
                decoded.push_back('\n');
                break;
            case 'r':
                decoded.push_back('\r');
                break;
            case 't':
                decoded.push_back('\t');
                break;
            case 'u':
                // support only \u0000 to \u00ff
                ++pos;
                if (pos + 4 > len) {
                    // impossible condition
                    driver.error(driver.loc_,
                                 "Overflow unicode escape in \"" + raw + "\"");
                }
                if ((raw[pos] != '0') || (raw[pos + 1] != '0')) {
                    driver.error(driver.loc_, "Unsupported unicode escape in \"" + raw + "\"");
                }
                pos += 2;
                c = raw[pos];
                if ((c >= '0') && (c <= '9')) {
                    b = (c - '0') << 4;
                } else if ((c >= 'A') && (c <= 'F')) {
                    b = (c - 'A' + 10) << 4;
                } else if ((c >= 'a') && (c <= 'f')) {
                    b = (c - 'a' + 10) << 4;
                } else {
                    // impossible condition
                    driver.error(driver.loc_, "Not hexadecimal in unicode escape in \"" + raw + "\"");
                }
                pos++;
                c = raw[pos];
                if ((c >= '0') && (c <= '9')) {
                    b |= c - '0';
                } else if ((c >= 'A') && (c <= 'F')) {
                    b |= c - 'A' + 10;
                } else if ((c >= 'a') && (c <= 'f')) {
                    b |= c - 'a' + 10;
                } else {
                    // impossible condition
                    driver.error(driver.loc_, "Not hexadecimal in unicode escape in \"" + raw + "\"");
                }
                decoded.push_back(static_cast<char>(b & 0xff));
                break;
            default:
                // impossible condition
                driver.error(driver.loc_, "Bad escape in \"" + raw + "\"");
            }
            break;
        default:
            if ((c >= 0) && (c < 0x20)) {
                // impossible condition
                driver.error(driver.loc_, "Invalid control in \"" + raw + "\"");
            }
            decoded.push_back(c);
        }
    }

    return AgentParser::make_STRING(decoded, driver.loc_);
}

\"{JSONStringCharacter}*{ControlCharacter}{ControlCharacterFill}*\" {
    // Bad string with a forbidden control character inside
    driver.error(driver.loc_, "Invalid control in " + std::string(yytext));
}

\"{JSONStringCharacter}*\\{BadJSONEscapeSequence}[^\x00-\x1f"]*\" {
    // Bad string with a bad escape inside