Commit 044a29f
[timeseries] Add initial support for elasticsearch #99
Closes #99
1 parent: 7b2a410
Showing 26 changed files with 1,356 additions and 155 deletions.
openwisp_monitoring/db/__init__.py
@@ -1,7 +1,5 @@
 from .backends import timeseries_db

 chart_query = timeseries_db.queries.chart_query
-default_chart_query = timeseries_db.queries.default_chart_query
-device_data_query = timeseries_db.queries.device_data_query

-__all__ = ['timeseries_db', 'chart_query', 'default_chart_query', 'device_data_query']
+__all__ = ['timeseries_db', 'chart_query']
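After this change only ``timeseries_db`` and ``chart_query`` remain importable from the package. A minimal usage sketch (the ``'uptime'``/``'elasticsearch'`` key lookup assumes the elasticsearch backend added in this commit):

# illustrative only: shows the import surface after this change
from openwisp_monitoring.db import timeseries_db, chart_query

q = chart_query['uptime']['elasticsearch']  # per-backend query template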
openwisp_monitoring/db/backends/elasticsearch/__init__.py (new file)
@@ -0,0 +1,3 @@
from .queries import _make_query

__all__ = ['_make_query']
openwisp_monitoring/db/backends/elasticsearch/client.py (new file)
466 changes: 466 additions & 0 deletions
Large diffs are not rendered by default.
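Since the client itself is not rendered, the following is a minimal sketch of its public surface as inferred from the test suite further down. Method names and call shapes come from the tests; bodies, defaults and anything beyond those calls are assumptions, not the actual implementation.

# Sketch of the backend client surface inferred from the tests below;
# bodies are illustrative placeholders, not the real implementation.
class DatabaseClient:
    ilm_enabled = True        # Index Lifecycle Management toggle (see test_ilm_disabled)
    client_error = Exception  # assumed to map to an elasticsearch exception class

    def write(self, name, values, database=None, tags=None,
              timestamp=None, retention_policy=None): ...
    def read(self, key, fields, tags=None, limit=None, order='time'): ...
    def query(self, query, index=None): ...               # raw query -> dict
    def get_list_query(self, query, precision='s'): ...   # -> list of points
    def delete_metric_data(self, key=None, tags=None): ...
    def validate_query(self, query): ...                  # raises ValidationError
    def get_query(self, chart_type, params, time, group_map,
                  query=None, timezone='UTC'): ...
    def default_chart_query(self, tags=False): ...
    def create_or_alter_retention_policy(self, name, duration=None): ...
    def get_list_retention_policies(self): ...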
openwisp_monitoring/db/backends/elasticsearch/index.py (new file)
@@ -0,0 +1,92 @@
import uuid

from django.conf import settings
from elasticsearch.exceptions import NotFoundError
from elasticsearch_dsl import Date, Document, InnerDoc, Nested, Q, Search


class Point(InnerDoc):
    time = Date(required=True, default_timezone=settings.TIME_ZONE)
    fields = Nested(dynamic=True, required=True, multi=True)


class MetricDocument(Document):
    tags = Nested(dynamic=True, required=False, multi=True)
    points = Nested(Point)

    class Index:
        name = 'metric'
        settings = {
            'number_of_shards': 1,
            'number_of_replicas': 0,
            'lifecycle.name': 'default',
            'lifecycle.rollover_alias': 'metric',
        }


def find_metric(client, index, tags, retention_policy=None, add=False):
    search = Search(using=client, index=index)
    if tags:
        tags_dict = dict()
        for key, value in tags.items():
            tags_dict[f'tags.{key}'] = value
        q = Q(
            'nested',
            path='tags',
            query=Q(
                'bool', must=[Q('match', **{k: str(v)}) for k, v in tags_dict.items()]
            ),
        )
    else:
        q = Q()
    try:
        result = list(search.query(q).execute())[0].meta
        return result['id'], result['index']
    except (NotFoundError, AttributeError, IndexError):
        if add:
            document = create_document(
                client, index, tags, retention_policy=retention_policy
            )
            return document['_id'], document['_index']
        return None


def create_document(client, key, tags, _id=None, retention_policy=None):
    """
    Adds a document to the relevant index using the ``key``, ``tags``
    and ``_id`` provided. If no ``_id`` is provided, a random UUID is used.
    """
    _id = str(_id or uuid.uuid1())
    # if the index already exists, create the document in its write index and return
    try:
        index_aliases = client.indices.get_alias(index=key)
        for k, v in index_aliases.items():
            if v['aliases'][key]['is_write_index']:
                break
        client.create(index=k, id=_id, body={'tags': tags})
        return {'_id': _id, '_index': k}
    except NotFoundError:
        pass
    # create a new index if it doesn't exist
    name = f'{key}-000001'
    document = MetricDocument(meta={'id': _id})
    document._index = document._index.clone(name)
    # create a new index template if it doesn't exist
    if not client.indices.exists_template(name=key):
        document._index.settings(**{'lifecycle.rollover_alias': key})
        if retention_policy:
            document._index.settings(**{'lifecycle.name': retention_policy})
        # an index pattern is added for Index Lifecycle Management
        document._index.as_template(key, f'{key}-*').save(using=client)
    document.init(using=client, index=name)
    document.meta.index = name
    document.tags = tags
    document.save(using=client, index=name)
    client.indices.put_alias(index=name, name=key, body={'is_write_index': True})
    if retention_policy:
        client.indices.put_settings(
            body={'lifecycle.name': retention_policy}, index=name
        )
    client.indices.put_settings(body={'lifecycle.rollover_alias': key}, index=name)
    client.indices.refresh(index=key)
    return document.to_dict(include_meta=True)
openwisp_monitoring/db/backends/elasticsearch/queries.py (new file)
130 changes: 130 additions & 0 deletions
@@ -0,0 +1,130 @@
import operator
from copy import deepcopy

from openwisp_utils.utils import deep_merge_dicts

from .settings import ADDITIONAL_CHART_OPERATIONS

default_chart_query = {
    'query': {
        'nested': {
            'path': 'tags',
            'query': {
                'bool': {
                    'must': [
                        {'match': {'tags.object_id': {'query': '{object_id}'}}},
                        {'match': {'tags.content_type': {'query': '{content_type}'}}},
                    ]
                }
            },
        },
    },
    '_source': False,
    'size': 0,
    'aggs': {
        'GroupByTime': {
            'nested': {
                'path': 'points',
                'aggs': {
                    'set_range': {
                        'filter': {
                            'range': {
                                'points.time': {'from': 'now-1d/d', 'to': 'now/d'}
                            }
                        },
                        'aggs': {
                            'time': {
                                'date_histogram': {
                                    'field': 'points.time',
                                    'fixed_interval': '10m',
                                    'format': 'date_time_no_millis',
                                    'order': {'_key': 'desc'},
                                },
                                'aggs': {
                                    'nest': {
                                        'nested': {
                                            'path': 'points.fields',
                                            'aggs': {
                                                '{field_name}': {
                                                    'avg': {
                                                        'field': 'points.fields.{field_name}'
                                                    }
                                                }
                                            },
                                        }
                                    },
                                },
                            },
                        },
                    }
                },
            }
        }
    },
}

math_map = {
    'uptime': {'operator': '*', 'value': 100},
    'memory_usage': {'operator': '*', 'value': 100},
    'CPU_load': {'operator': '*', 'value': 100},
    'disk_usage': {'operator': '*', 'value': 100},
    'upload': {'operator': '/', 'value': 1000000000},
    'download': {'operator': '/', 'value': 1000000000},
}

operator_lookup = {
    '+': operator.add,
    '-': operator.sub,
    '*': operator.mul,
    '/': operator.truediv,
}

if ADDITIONAL_CHART_OPERATIONS:
    assert isinstance(ADDITIONAL_CHART_OPERATIONS, dict)
    for value in ADDITIONAL_CHART_OPERATIONS.values():
        assert value['operator'] in operator_lookup
        assert isinstance(value['value'], (int, float))
    math_map = deep_merge_dicts(math_map, ADDITIONAL_CHART_OPERATIONS)


def _make_query(aggregation=None):
    query = deepcopy(default_chart_query)
    if aggregation:
        query['aggs']['GroupByTime']['nested']['aggs']['set_range']['aggs']['time'][
            'aggs'
        ]['nest']['nested']['aggs'] = aggregation
    return query


def _get_chart_query():
    aggregation_dict = {
        'uptime': {'uptime': {'avg': {'field': 'points.fields.reachable'}}},
        'packet_loss': {'packet_loss': {'avg': {'field': 'points.fields.loss'}}},
        'rtt': {
            'RTT_average': {'avg': {'field': 'points.fields.rtt_avg'}},
            'RTT_max': {'avg': {'field': 'points.fields.rtt_max'}},
            'RTT_min': {'avg': {'field': 'points.fields.rtt_min'}},
        },
        'traffic': {
            'upload': {'sum': {'field': 'points.fields.tx_bytes'}},
            'download': {'sum': {'field': 'points.fields.rx_bytes'}},
        },
        'wifi_clients': {
            'wifi_clients': {
                'cardinality': {
                    'field': 'points.fields.{field_name}.keyword',
                    'missing': 0,
                }
            }
        },
        'memory': {'memory_usage': {'avg': {'field': 'points.fields.percent_used'}}},
        'cpu': {'CPU_load': {'avg': {'field': 'points.fields.cpu_usage'}}},
        'disk': {'disk_usage': {'avg': {'field': 'points.fields.used_disk'}}},
    }
    query = {}
    for key, value in aggregation_dict.items():
        query[key] = {'elasticsearch': _make_query(value)}
    return query


chart_query = _get_chart_query()
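Tokens such as ``{object_id}``, ``{content_type}`` and ``{field_name}`` are format-style placeholders filled in by the caller. A minimal illustration of rendering the ``traffic`` query; the string-replace substitution shown here is an assumption for illustration, the real interpolation happens in the backend client:

import json

# build the query for the 'traffic' chart and fill its placeholders
query = chart_query['traffic']['elasticsearch']
rendered = json.loads(
    json.dumps(query)
    .replace('{object_id}', 'd66fd7e5-7e6f-4a39-9997-9d7ae4dd6e4b')  # example UUID
    .replace('{content_type}', 'config.device')
)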
openwisp_monitoring/db/backends/elasticsearch/retention_policies.py (new file)
41 changes: 41 additions & 0 deletions
@@ -0,0 +1,41 @@
# By default the age is calculated from the date the index is created, but if
# the index has been rolled over, then the rollover date is used to calculate the age

default_rp_policy = {
    'policy': {
        'phases': {
            'hot': {
                'actions': {
                    'rollover': {'max_age': '30d', 'max_size': '90G'},
                    'set_priority': {'priority': 100},
                }
            },
            'warm': {
                'min_age': '30d',
                'actions': {
                    'forcemerge': {'max_num_segments': 1},
                    'allocate': {'number_of_replicas': 0},
                    'set_priority': {'priority': 50},
                },
            },
            'cold': {'min_age': '150d', 'actions': {'freeze': {}}},
            'delete': {'min_age': '335d', 'actions': {'delete': {}}},
        }
    }
}


def _make_policy(max_age):
    return {
        'policy': {
            'phases': {
                'hot': {
                    'actions': {
                        'rollover': {'max_age': max_age},
                        'set_priority': {'priority': 100},
                    }
                },
                'delete': {'actions': {'delete': {}}},
            }
        }
    }
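For instance, the default short retention policy of ``24h`` (see ``test_retention_policy`` below) is converted into a one-day rollover. A sketch of that conversion, with the hour-to-day arithmetic taken from the tests:

# mirrors the conversion exercised in test_retention_policy below:
# a '24h' retention policy becomes a '1d' rollover max_age
SHORT_RETENTION_POLICY = '24h'  # default from openwisp_monitoring.device.settings
days = f"{int(SHORT_RETENTION_POLICY.split('h')[0]) // 24}d"  # -> '1d'
policy = _make_policy(days)
assert policy['policy']['phases']['hot']['actions']['rollover']['max_age'] == '1d'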
openwisp_monitoring/db/backends/elasticsearch/settings.py (new file)
@@ -0,0 +1,5 @@
from django.conf import settings

ADDITIONAL_CHART_OPERATIONS = getattr(
    settings, 'OPENWISP_MONITORING_ADDITIONAL_CHART_OPERATIONS', {}
)
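A sketch of how a project could use this setting; ``queries.py`` above validates that each operator is one of ``+ - * /`` and each value is numeric. The override below mirrors the one used in ``test_additional_chart_operations_setting``:

# settings.py of a Django project using openwisp-monitoring
# override the default traffic math (bytes -> GB) to report MB instead
OPENWISP_MONITORING_ADDITIONAL_CHART_OPERATIONS = {
    'upload': {'operator': '/', 'value': 1000000},
    'download': {'operator': '/', 'value': 1000000},
}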
File renamed without changes.
openwisp_monitoring/db/backends/elasticsearch/tests/client_tests.py (new file)
296 changes: 296 additions & 0 deletions
@@ -0,0 +1,296 @@
from datetime import datetime, timedelta
from importlib import reload
from unittest.mock import patch

from celery.exceptions import Retry
from django.conf import settings
from django.core.exceptions import ValidationError
from django.utils.timezone import now
from elasticsearch.exceptions import ElasticsearchException
from freezegun import freeze_time
from pytz import timezone as tz

from openwisp_monitoring.device.settings import SHORT_RETENTION_POLICY
from openwisp_monitoring.device.tests import DeviceMonitoringTestCase
from openwisp_monitoring.device.utils import SHORT_RP, manage_short_retention_policy

from ....exceptions import TimeseriesWriteException
from ... import timeseries_db
from .. import queries as queries_module
from ..index import MetricDocument


class TestDatabaseClient(DeviceMonitoringTestCase):
    def test_get_query_fields_function(self):
        c = self._create_chart(test_data=None, configuration='histogram')
        q = c.get_query(fields=['ssh', 'http2', 'apple-music'])
        self.assertIn("'ssh': {'sum': {'field': 'points.fields.ssh'}}", str(q))
        self.assertIn("'http2': {'sum': {'field': 'points.fields.http2'}}", str(q))
        self.assertIn(
            "'apple-music': {'sum': {'field': 'points.fields.apple-music'}}", str(q)
        )

    def test_default_query(self):
        c = self._create_chart(test_data=False)
        q = timeseries_db.default_chart_query(tags=True)
        self.assertEqual(c.query, q)

    def test_write(self):
        timeseries_db.write('test_write', dict(value=2))
        measurement = timeseries_db.read(key='test_write', fields='value')[0]
        self.assertEqual(measurement['value'], 2)

    def test_general_write(self):
        m = self._create_general_metric(name='Sync test')
        m.write(1)
        measurement = timeseries_db.read(key='sync_test', fields='value')[0]
        self.assertEqual(measurement['value'], 1)

    def test_object_write(self):
        om = self._create_object_metric()
        om.write(3)
        measurement = timeseries_db.read(
            key='test_metric', fields='value', tags=om.tags
        )[0]
        self.assertEqual(measurement['value'], 3)

    def test_general_same_key_different_fields(self):
        down = self._create_general_metric(
            name='traffic (download)', key='traffic', field_name='download'
        )
        down.write(200)
        up = self._create_general_metric(
            name='traffic (upload)', key='traffic', field_name='upload'
        )
        up.write(100)
        measurement = timeseries_db.read(key='traffic', fields='download')[0]
        self.assertEqual(measurement['download'], 200)
        measurement = timeseries_db.read(key='traffic', fields='upload')[0]
        self.assertEqual(measurement['upload'], 100)

    def test_object_same_key_different_fields(self):
        user = self._create_user()
        user_down = self._create_object_metric(
            name='traffic (download)',
            key='traffic',
            field_name='download',
            content_object=user,
        )
        user_down.write(200)
        user_up = self._create_object_metric(
            name='traffic (upload)',
            key='traffic',
            field_name='upload',
            content_object=user,
        )
        user_up.write(100)
        measurement = timeseries_db.read(
            key='traffic', fields='download', tags=user_down.tags
        )[0]
        self.assertEqual(measurement['download'], 200)
        measurement = timeseries_db.read(
            key='traffic', fields='upload', tags=user_up.tags
        )[0]
        self.assertEqual(measurement['upload'], 100)

    def test_get_query_1d(self):
        c = self._create_chart(test_data=None, configuration='uptime')
        q = c.get_query(time='1d')
        time_map = c.GROUP_MAP['1d']
        self.assertIn(
            "{'range': {'points.time': {'from': 'now-1d/d', 'to': 'now/d'}}}", str(q)
        )
        self.assertIn(f"'fixed_interval': '{time_map}'", str(q))

    def test_get_query_30d(self):
        c = self._create_chart(test_data=None, configuration='uptime')
        q = c.get_query(time='30d')
        time_map = c.GROUP_MAP['30d']
        self.assertIn(
            "{'range': {'points.time': {'from': 'now-30d/d', 'to': 'now/d'}}}", str(q)
        )
        self.assertIn(f"'fixed_interval': '{time_map}'", str(q))

    def test_retention_policy(self):
        manage_short_retention_policy()
        rp = timeseries_db.get_list_retention_policies()
        assert 'default' in rp
        assert SHORT_RP in rp
        days = f'{int(SHORT_RETENTION_POLICY.split("h")[0]) // 24}d'
        self.assertEqual(
            rp['short']['policy']['phases']['hot']['actions']['rollover']['max_age'],
            days,
        )

    def test_get_query(self):
        c = self._create_chart(test_data=False)
        m = c.metric
        params = dict(
            field_name=m.field_name,
            key=m.key,
            content_type=m.content_type_key,
            object_id=m.object_id,
            time=c.DEFAULT_TIME,
        )
        expected = timeseries_db.get_query(
            c.type,
            params,
            c.DEFAULT_TIME,
            c.GROUP_MAP,
            query=c.query,
            timezone=settings.TIME_ZONE,
        )
        self.assertEqual(c.get_query(), expected)

    def test_query_no_index(self):
        timeseries_db.delete_metric_data(key='ping')
        c = self._create_chart(test_data=False)
        q = c.get_query()
        self.assertEqual(timeseries_db.query(q, index='ping'), {})
        self.assertEqual(timeseries_db.get_list_query(q), [])

    def test_1d_chart_data(self):
        c = self._create_chart()
        data = c.read(time='1d')
        self.assertIn('x', data)
        self.assertEqual(len(data['x']), 144)
        self.assertIn('traces', data)
        self.assertEqual(9.0, data['traces'][0][1][-1])
        # test that a chart with old data has the same length
        m = self._create_general_metric(name='dummy')
        c = self._create_chart(metric=m, test_data=False)
        m.write(6.0, time=now() - timedelta(hours=23))
        data = c.read(time='1d')
        self.assertIn('x', data)
        self.assertEqual(len(data['x']), 144)
        self.assertIn('traces', data)
        self.assertIn(6.0, data['traces'][0][1])

    def test_delete_metric_data(self):
        obj = self._create_user()
        om = self._create_object_metric(name='Logins', content_object=obj)
        om.write(100)
        self.assertEqual(om.read()[0]['value'], 100)
        timeseries_db.delete_metric_data(key=om.key, tags=om.tags)

    def test_invalid_query(self):
        q = timeseries_db.default_chart_query()
        q['query']['nested']['query']['must'] = 'invalid'
        try:
            timeseries_db.validate_query(q)
        except ValidationError as e:
            self.assertIn('ParsingException: [bool] malformed query', str(e))

    def test_non_aggregation_query(self):
        q = {'query': timeseries_db.default_chart_query()['query']}
        self.assertEqual(timeseries_db.get_list_query(q), [])

    def test_timestamp_precision(self):
        c = self._create_chart()
        points = timeseries_db.get_list_query(c.get_query(), precision='ms')
        self.assertIsInstance(points[0]['time'], float)
        points = timeseries_db.get_list_query(c.get_query(), precision='s')
        self.assertIsInstance(points[0]['time'], int)

    def create_docs_single_index(self):
        m = self._create_object_metric(name='dummy')
        m.write(1)
        d = self._create_device(organization=self._create_org())
        m2 = self._create_object_metric(name='dummy', content_object=d)
        m2.write(1)
        self.assertEqual(len(timeseries_db.get_db.indices.get_alias(name='dummy')), 1)

    def test_additional_chart_operations_setting(self):
        modify_operators = {
            'upload': {'operator': '/', 'value': 1000000},
            'download': {'operator': '/', 'value': 1000000},
        }
        path = 'openwisp_monitoring.db.backends.elasticsearch.queries.ADDITIONAL_CHART_OPERATIONS'
        with patch.dict(path, modify_operators, clear=True):
            queries = reload(queries_module)
            self.assertEqual(queries.ADDITIONAL_CHART_OPERATIONS, modify_operators)
            self.assertEqual(queries.math_map['upload'], modify_operators['upload'])
            self.assertEqual(queries.math_map['download'], modify_operators['download'])

    def test_read(self):
        c = self._create_chart()
        data = c.read()
        key = c.metric.field_name
        self.assertIn('x', data)
        self.assertIn('traces', data)
        self.assertEqual(len(data['x']), 168)
        charts = data['traces']
        self.assertEqual(charts[0][0], key)
        self.assertEqual(len(charts[0][1]), 168)
        self.assertTrue(all(elem in charts[0][1] for elem in [3, 6, 9]))

    def test_read_multiple(self):
        c = self._create_chart(test_data=None, configuration='multiple_test')
        m1 = c.metric
        m2 = self._create_object_metric(
            name='test metric 2',
            key='test_metric',
            field_name='value2',
            content_object=m1.content_object,
        )
        now_ = now()
        for n in range(0, 3):
            time = now_ - timedelta(days=n)
            m1.write(n + 1, time=time)
            m2.write(n + 2, time=time)
        data = c.read()
        f1 = m1.field_name
        f2 = 'value2'
        self.assertIn('x', data)
        self.assertIn('traces', data)
        self.assertEqual(len(data['x']), 168)
        charts = data['traces']
        self.assertIn(f1, charts[0][0])
        self.assertIn(f2, charts[1][0])
        self.assertEqual(len(charts[0][1]), 168)
        self.assertEqual(len(charts[1][1]), 168)
        self.assertTrue(all(elem in charts[0][1] for elem in [3, 2, 1]))
        self.assertTrue(all(elem in charts[1][1] for elem in [4, 3, 2]))

    def test_ilm_disabled(self):
        with patch.object(timeseries_db, 'ilm_enabled', False):
            self.assertFalse(timeseries_db.ilm_enabled)
            self.assertIsNone(
                timeseries_db.create_or_alter_retention_policy(name='default')
            )
            self.assertIsNone(timeseries_db.get_list_retention_policies())

    @patch.object(MetricDocument, 'get', side_effect=ElasticsearchException)
    def test_write_retry(self, mock_write):
        with self.assertRaises(TimeseriesWriteException):
            timeseries_db.write('test_write', {'value': 1})
        m = self._create_general_metric(name='Test metric')
        with self.assertRaises(Retry):
            m.write(1)

    @patch.object(MetricDocument, 'get', side_effect=ElasticsearchException)
    def test_timeseries_write_params(self, mock_write):
        with freeze_time('Jan 14th, 2020') as frozen_datetime:
            m = self._create_general_metric(name='Test metric')
            with self.assertRaises(Retry) as e:
                m.write(1)
            frozen_datetime.tick(delta=timedelta(minutes=10))
            self.assertEqual(
                now(), datetime(2020, 1, 14, tzinfo=tz('UTC')) + timedelta(minutes=10)
            )
            task_signature = e.exception.sig
            with patch.object(timeseries_db, 'write') as mock_write:
                self._retry_task(task_signature)
            mock_write.assert_called_with(
                'test_metric',
                {'value': 1},
                database=None,
                retention_policy=None,
                tags={},
                # this should be the original time at the moment of first failure
                timestamp='2020-01-14T00:00:00Z',
            )

    def _retry_task(self, task_signature):
        task_kwargs = task_signature.kwargs
        task_signature.type.run(**task_kwargs)
Empty file.
openwisp_monitoring/db/tests.py (new file)
@@ -0,0 +1,55 @@
from datetime import timedelta

from django.utils.timezone import now
from swapper import load_model

from . import timeseries_db
from .backends import load_backend_module

Chart = load_model('monitoring', 'Chart')
tests = load_backend_module(module='tests.client_tests')


class TestDatabaseClient(tests.TestDatabaseClient):
    def test_is_aggregate_bug(self):
        m = self._create_object_metric(name='summary_avg')
        c = Chart(metric=m, configuration='dummy')
        self.assertFalse(timeseries_db._is_aggregate(c.query))

    def test_is_aggregate_fields_function(self):
        m = self._create_object_metric(name='is_aggregate_func')
        c = Chart(metric=m, configuration='uptime')
        self.assertTrue(timeseries_db._is_aggregate(c.query))

    def test_delete_metric_data(self):
        m = self._create_general_metric(name='test_metric')
        m.write(100)
        self.assertEqual(m.read()[0]['value'], 100)
        timeseries_db.delete_metric_data(key=m.key)
        self.assertEqual(m.read(), [])
        om = self._create_object_metric(name='dummy')
        om.write(50)
        m.write(100)
        self.assertEqual(m.read()[0]['value'], 100)
        self.assertEqual(om.read()[0]['value'], 50)
        timeseries_db.delete_metric_data()
        self.assertEqual(m.read(), [])
        self.assertEqual(om.read(), [])

    def test_read_order(self):
        timeseries_db.delete_metric_data()
        m = self._create_general_metric(name='dummy')
        m.write(40, time=now() - timedelta(days=2))
        m.write(30)
        with self.subTest('Test ascending read order'):
            metric_data = m.read(limit=2, order='time')
            self.assertEqual(metric_data[0]['value'], 40)
            self.assertEqual(metric_data[1]['value'], 30)
        with self.subTest('Test descending read order'):
            metric_data = m.read(limit=2, order='-time')
            self.assertEqual(metric_data[0]['value'], 30)
            self.assertEqual(metric_data[1]['value'], 40)
        with self.subTest('Test invalid read order'):
            with self.assertRaises(timeseries_db.client_error) as e:
                metric_data = m.read(limit=2, order='invalid')
            self.assertIn('Invalid order "invalid" passed.', str(e))