Skip to content
GitLab
Projects
Groups
Snippets
/
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
ISC Open Source Projects
Kea
Commits
d659abdd
Commit
d659abdd
authored
Aug 29, 2012
by
Naoki Kambe
Browse files
[master] Merge branch 'trac2179'
Conflicts: ChangeLog
parents
73270385
4d9bfbae
Changes
11
Expand all
Hide whitespace changes
Inline
Side-by-side
ChangeLog
View file @
d659abdd
470. [func] naokikambe
The stats module now supports partial statistics updates. Each
module can return only statistics data which have been updated since
the last time it sent them to the stats module. The purpose of partial
updates is to reduce the amount of statistics data sent through the
message queue.
(Trac #2179, git TBD)
469. [bug] jelte
libdatasrc: the data source client list class now ignores zone
content problems (such as out-of-zone data) in MasterFiles type
...
...
src/bin/stats/stats.py.in
View file @
d659abdd
...
...
@@ -94,6 +94,9 @@ def get_spec_defaults(spec):
return spec.get(
"item_default",
dict([ (s["item_name"], _get_spec_defaults(s)) for s in spec["map_item_spec"] ]) )
elif item_type == "named_set":
# in a named_set type, it returns {} as a default value
return spec.get("item_default", {})
else:
return spec.get("item_default", None)
return dict([ (s['item_name'], _get_spec_defaults(s)) for s in spec ])
...
...
@@ -137,6 +140,33 @@ def _accum(a, b):
# Nothing matches above, the first arg is returned
return a
def merge_oldnew(old, new):
    """
    Merge two arguments recursively and return the result.

    If both arguments are dicts, keys present in both are merged
    recursively, keys present only in ``new`` are added, and keys
    present only in ``old`` are kept unchanged.  If both arguments are
    lists, elements at the same index are merged recursively and any
    trailing elements of the longer list are kept as-is.  For any other
    combination of types the value of ``new`` simply replaces ``old``.
    Both arguments should normally be of the same data type.
    """
    # Deliberately use exact type checks (not isinstance) so that only
    # plain dicts/lists are merged; anything else is replaced outright.
    if type(old) is dict and type(new) is dict:
        merged = {}
        for key, value in new.items():
            # recurse where the key exists on both sides
            merged[key] = merge_oldnew(old[key], value) if key in old else value
        for key, value in old.items():
            if key not in new:
                merged[key] = value
        return merged
    elif type(old) is list and type(new) is list:
        # pairwise merge up to the shorter length ...
        merged = [merge_oldnew(o, n) for (o, n) in zip(old, new)]
        # ... then keep the tail of whichever list is longer
        tail_src = old if len(old) > len(new) else new
        merged.extend(tail_src[len(merged):])
        return merged
    else:
        return new
class Callback():
"""
A Callback handler class
...
...
@@ -486,17 +516,48 @@ class Stats:
# would be updated.
errors = []
if owner and data:
_data = self.statistics_data_bymid.copy()
try:
if self.modules[owner].validate_statistics(False, data, errors):
if owner in self.statistics_data_bymid:
if mid in self.statistics_data_bymid[owner]:
self.statistics_data_bymid[owner][mid].update(data)
for (_key, _val) in data.items():
if self.modules[owner].validate_statistics(
False, {_key: _val}, errors):
if owner not in _data:
_data[owner] = { mid: { _key: _val } }
elif mid not in _data[owner]:
_data[owner][mid] = { _key: _val }
else:
self.statistics_data_bymid[owner][mid] = data
else:
self.statistics_data_bymid[owner] = { mid : data }
# merge recursively old value and new
# value each other
_data[owner][mid] = \
merge_oldnew(_data[owner][mid],
{_key: _val})
continue
# the key string might be a "xx/yy/zz[0]"
# type. try it.
if _key.find('/') >= 0 or \
isc.cc.data.identifier_has_list_index(_key):
# remove the last error
if errors: errors.pop()
# try to update and check validation in advance
__data = _data.copy()
if owner not in _data:
__data[owner] = {}
if mid not in _data[owner]:
__data[owner][mid] = {}
# use the isc.cc.data.set method
try:
isc.cc.data.set(__data[owner][mid],
_key, _val)
if self.modules[owner].validate_statistics(
False, __data[owner][mid], errors):
_data = __data
except Exception as e:
errors.append(
"%s: %s" % (e.__class__.__name__, e))
except KeyError:
errors.append("unknown module name: " + str(owner))
if not errors:
self.statistics_data_bymid = _data
# Just consolidate statistics data of each module without
# removing that of modules which have been already dead
...
...
@@ -504,7 +565,19 @@ class Stats:
for m in mlist:
if self.statistics_data_bymid[m]:
if m in self.statistics_data:
self.statistics_data[m].update(
# propagate the default values by times of
# instances
_len = len(self.statistics_data_bymid[m])
for i in range(0, _len - 1):
self.statistics_data[m] = _accum(
self.statistics_data[m],
self.statistics_data[m])
# replace the default values with summaries of the
# collected values of each module. But the default
# values which are not included in collected
# values are not replaced.
self.statistics_data[m] = merge_oldnew(
self.statistics_data[m],
_accum_bymodule(
self.statistics_data_bymid[m]))
...
...
src/bin/stats/tests/b10-stats-httpd_test.py
View file @
d659abdd
...
...
@@ -53,7 +53,13 @@ DUMMY_DATA = {
"zonename"
:
"test.example"
,
"queries.tcp"
:
2
,
"queries.udp"
:
3
}]
}],
"nds_queries.perzone"
:
{
"test.example"
:
{
"queries.tcp"
:
2
,
"queries.udp"
:
3
}
}
},
'Stats'
:
{
"report_time"
:
"2011-03-04T11:59:19Z"
,
...
...
src/bin/stats/tests/b10-stats_test.py
View file @
d659abdd
This diff is collapsed.
Click to expand it.
src/bin/stats/tests/test_utils.py
View file @
d659abdd
...
...
@@ -354,6 +354,49 @@ class MockAuth:
}
]
}
},
{
"item_name": "nds_queries.perzone",
"item_type": "named_set",
"item_optional": false,
"item_default": {
"test10.example" : {
"queries.udp" : 1,
"queries.tcp" : 2
},
"test20.example" : {
"queries.udp" : 3,
"queries.tcp" : 4
}
},
"item_title": "Queries per zone",
"item_description": "Queries per zone",
"named_set_item_spec": {
"item_name": "zonename",
"item_type": "map",
"item_optional": false,
"item_default": {},
"item_title": "Zonename",
"item_description": "Zonename",
"map_item_spec": [
{
"item_name": "queries.udp",
"item_type": "integer",
"item_optional": false,
"item_default": 0,
"item_title": "Queries UDP per zone",
"item_description": "A number of UDP query counts per zone"
},
{
"item_name": "queries.tcp",
"item_type": "integer",
"item_optional": false,
"item_default": 0,
"item_title": "Queries TCP per zone",
"item_description": "A number of TCP query counts per zone"
}
]
}
}
]
}
...
...
@@ -378,6 +421,12 @@ class MockAuth:
'queries.tcp'
:
5
,
'queries.udp'
:
4
}]
self
.
nds_queries_per_zone
=
{
'test10.example'
:
{
'queries.tcp'
:
5
,
'queries.udp'
:
4
}
}
def
run
(
self
):
self
.
mccs
.
start
()
...
...
@@ -399,7 +448,19 @@ class MockAuth:
self
.
got_command_name
=
command
sdata
=
{
'queries.tcp'
:
self
.
queries_tcp
,
'queries.udp'
:
self
.
queries_udp
,
'queries.perzone'
:
self
.
queries_per_zone
}
'queries.perzone'
:
self
.
queries_per_zone
,
'nds_queries.perzone'
:
{
'test10.example'
:
{
'queries.tcp'
:
\
isc
.
cc
.
data
.
find
(
self
.
nds_queries_per_zone
,
'test10.example/queries.tcp'
)
}
},
'nds_queries.perzone/test10.example/queries.udp'
:
isc
.
cc
.
data
.
find
(
self
.
nds_queries_per_zone
,
'test10.example/queries.udp'
)
}
if
command
==
'getstats'
:
return
isc
.
config
.
create_answer
(
0
,
sdata
)
return
isc
.
config
.
create_answer
(
1
,
"Unknown Command"
)
...
...
src/lib/config/tests/module_spec_unittests.cc
View file @
d659abdd
...
...
@@ -224,6 +224,15 @@ TEST(ModuleSpec, StatisticsValidation) {
ElementPtr
errors
=
Element
::
createList
();
EXPECT_FALSE
(
statisticsTestWithErrors
(
dd
,
"data33_2.data"
,
errors
));
EXPECT_EQ
(
"[
\"
Format mismatch
\"
,
\"
Format mismatch
\"
,
\"
Format mismatch
\"
]"
,
errors
->
str
());
dd
=
moduleSpecFromFile
(
specfile
(
"spec41.spec"
));
EXPECT_TRUE
(
statisticsTest
(
dd
,
"data41_1.data"
));
EXPECT_FALSE
(
statisticsTest
(
dd
,
"data41_2.data"
));
errors
=
Element
::
createList
();
EXPECT_FALSE
(
statisticsTestWithErrors
(
dd
,
"data41_2.data"
,
errors
));
EXPECT_EQ
(
"[
\"
Type mismatch
\"
]"
,
errors
->
str
());
}
TEST
(
ModuleSpec
,
CommandValidation
)
{
...
...
src/lib/config/tests/testdata/Makefile.am
View file @
d659abdd
...
...
@@ -27,6 +27,8 @@ EXTRA_DIST += data32_2.data
EXTRA_DIST
+=
data32_3.data
EXTRA_DIST
+=
data33_1.data
EXTRA_DIST
+=
data33_2.data
EXTRA_DIST
+=
data41_1.data
EXTRA_DIST
+=
data41_2.data
EXTRA_DIST
+=
spec1.spec
EXTRA_DIST
+=
spec2.spec
EXTRA_DIST
+=
spec3.spec
...
...
@@ -67,3 +69,4 @@ EXTRA_DIST += spec37.spec
EXTRA_DIST
+=
spec38.spec
EXTRA_DIST
+=
spec39.spec
EXTRA_DIST
+=
spec40.spec
EXTRA_DIST
+=
spec41.spec
src/lib/config/tests/testdata/data41_1.data
0 → 100644
View file @
d659abdd
{
"zones": {
"example.org": {
"queries.tcp": 100,
"queries.udp": 200
},
"example.net": {
"queries.tcp": 300,
"queries.udp": 400
}
}
}
src/lib/config/tests/testdata/data41_2.data
0 → 100644
View file @
d659abdd
{
"zones": [
{
"example.org": {
"queries.tcp": 100,
"queries.udp": 200
}
},
{
"example.net": {
"queries.tcp": 300,
"queries.udp": 400
}
}
]
}
src/lib/config/tests/testdata/spec41.spec
0 → 100644
View file @
d659abdd
{
"module_spec": {
"module_name": "Spec40",
"statistics": [
{
"item_name": "zones",
"item_type": "named_set",
"item_optional": false,
"item_default": { },
"item_title": "Dummy name set",
"item_description": "A dummy name set",
"named_set_item_spec": {
"item_name": "zonename",
"item_type": "map",
"item_optional": false,
"item_default": { },
"map_item_spec": [
{
"item_name": "queries.tcp",
"item_optional": false,
"item_type": "integer",
"item_default": 0
},
{
"item_name": "queries.udp",
"item_optional": false,
"item_type": "integer",
"item_default": 0
}
]
}
}
]
}
}
src/lib/python/isc/config/tests/module_spec_test.py
View file @
d659abdd
...
...
@@ -138,6 +138,8 @@ class TestModuleSpec(unittest.TestCase):
self
.
assertFalse
(
self
.
read_spec_file
(
"spec1.spec"
).
validate_statistics
(
True
,
None
,
None
));
self
.
assertTrue
(
_validate_stat
(
"spec33.spec"
,
"data33_1.data"
))
self
.
assertFalse
(
_validate_stat
(
"spec33.spec"
,
"data33_2.data"
))
self
.
assertTrue
(
_validate_stat
(
"spec41.spec"
,
"data41_1.data"
))
self
.
assertFalse
(
_validate_stat
(
"spec41.spec"
,
"data41_2.data"
))
def
test_init
(
self
):
self
.
assertRaises
(
ModuleSpecError
,
ModuleSpec
,
1
)
...
...
Write
Preview
Supports
Markdown
0%
Try again
or
attach a new file
.
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment