本文整理汇总了Python中bzt.modules.blazemeter.BlazeMeterUploader.monitoring_data方法的典型用法代码示例。如果您正苦于以下问题:Python BlazeMeterUploader.monitoring_data方法的具体用法?Python BlazeMeterUploader.monitoring_data怎么用?Python BlazeMeterUploader.monitoring_data使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类bzt.modules.blazemeter.BlazeMeterUploader的用法示例。
在下文中一共展示了BlazeMeterUploader.monitoring_data方法的6个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: test_monitoring_buffer_limit_option
# Required import: from bzt.modules.blazemeter import BlazeMeterUploader
# Method under test: BlazeMeterUploader.monitoring_data
def test_monitoring_buffer_limit_option(self):
    """Verify the monitoring-buffer-limit setting caps each source's buffer.

    Feeds 1000 monitoring samples through the uploader and checks that no
    per-source buffer ever exceeds the configured limit of 100 entries,
    and that only the prepare-phase request hit the mocked API.
    """
    uploader = BlazeMeterUploader()
    uploader.engine = EngineEmul()
    api_mock = BZMock(uploader._user)
    uploader.settings["monitoring-buffer-limit"] = 100
    uploader.prepare()
    for tick in range(1000):
        sample = [{"ts": tick, "source": "local", "cpu": float(tick) / 1000 * 100,
                   "mem": 2, "bytes-recv": 100, "other": 0}]
        uploader.monitoring_data(sample)
    for _, points in iteritems(uploader.monitoring_buffer.data):
        self.assertLessEqual(len(points), 100)
    self.assertEqual(1, len(api_mock.requests))
示例2: test_monitoring_buffer_limit_option
# Required import: from bzt.modules.blazemeter import BlazeMeterUploader
# Method under test: BlazeMeterUploader.monitoring_data
def test_monitoring_buffer_limit_option(self):
    """Verify the monitoring-buffer-limit setting caps each source's buffer.

    Uses the emulated BlazeMeter client, pushes 1000 monitoring samples, and
    asserts every per-source buffer stays within the configured limit of 100.
    """
    uploader = BlazeMeterUploader()
    uploader.engine = EngineEmul()
    uploader.client = BlazeMeterClientEmul(logging.getLogger(''))
    uploader.client.results.append({"marker": "ping", 'result': {}})
    uploader.settings["monitoring-buffer-limit"] = 100
    uploader.prepare()
    for tick in range(1000):
        sample = [{"ts": tick, "source": "local", "cpu": float(tick) / 1000 * 100,
                   "mem": 2, "bytes-recv": 100, "other": 0}]
        uploader.monitoring_data(sample)
    for _, points in iteritems(uploader.monitoring_buffer.data):
        self.assertLessEqual(len(points), 100)
示例3: test_check
# Required import: from bzt.modules.blazemeter import BlazeMeterUploader
# Method under test: BlazeMeterUploader.monitoring_data
def test_check(self):
    """Exercise the full uploader lifecycle against an emulated client.

    Queues the canned API responses the uploader will consume in order
    (ping, project lookup/create, test create, session start, data pushes,
    termination), then drives prepare/startup/check/shutdown/post_process.
    A session response with statusCode 140 must make check() raise
    KeyboardInterrupt (server-requested stop).
    """
    client = BlazeMeterClientEmul(logging.getLogger(''))
    client.results.append({"marker": "ping", 'result': {}})
    client.results.append({"marker": "projects", 'result': []})
    client.results.append({"marker": "project-create", 'result': {
        "id": time.time(),
        "name": "boo",
        "userId": time.time(),
        "description": None,
        "created": time.time(),
        "updated": time.time(),
        "organizationId": None
    }})
    client.results.append({"marker": "tests", 'result': {}})
    client.results.append({"marker": "test-create", 'result': {'id': 'unittest1'}})
    client.results.append(
        {"marker": "sess-start", 'result': {'session': {'id': 'sess1', 'userId': 1}, 'signature': ''}})
    client.results.append({"marker": "first push", 'result': {'session': {}}})
    # client.results.append(None)  # first check error stats
    client.results.append({"marker": "mon push", "result": True})
    client.results.append({"marker": "second push", 'result': {'session': {"statusCode": 140, 'status': 'ENDED'}}})
    # client.results.append(None)  # second check error stats
    client.results.append({"marker": "post-proc push", 'result': {'session': {}}})
    client.results.append({"marker": "upload1", "result": True})  # post-proc error stats
    client.results.append({"marker": "terminate", 'result': {'session': {}}})
    obj = BlazeMeterUploader()
    obj.parameters['project'] = 'Proj name'
    obj.settings['token'] = '123'
    obj.settings['browser-open'] = 'none'
    obj.engine = EngineEmul()
    # FIX: the original concatenated artifacts_dir and the basename without a
    # path separator, producing a sibling path like "...artifactsfile.py";
    # use os.path.join as the other test_check variant in this file does.
    shutil.copy(__file__, os.path.join(obj.engine.artifacts_dir, os.path.basename(__file__)))
    obj.client = client
    obj.prepare()
    obj.startup()
    for x in range(0, 31):
        obj.aggregated_second(random_datapoint(x))
    mon = [{"ts": 1, "source": "local", "cpu": 1, "mem": 2, "bytes-recv": 100, "other": 0}]
    obj.monitoring_data(mon)
    obj.check()
    for x in range(32, 65):
        obj.aggregated_second(random_datapoint(x))
    try:
        obj.check()
        self.fail()  # statusCode 140 should have interrupted the run
    except KeyboardInterrupt:
        pass
    obj.aggregated_second(random_datapoint(10))
    obj.shutdown()
    obj.post_process()
示例4: test_multiple_reporters_one_monitoring
# Required import: from bzt.modules.blazemeter import BlazeMeterUploader
# Method under test: BlazeMeterUploader.monitoring_data
def test_multiple_reporters_one_monitoring(self):
    """Feed the same monitoring samples to two independent uploaders.

    Each uploader gets its own engine and mocked API; the test passes if
    both accept the shared monitoring payloads without interfering.
    """
    first = BlazeMeterUploader()
    first.engine = EngineEmul()
    BZMock(first._user)

    second = BlazeMeterUploader()
    second.engine = EngineEmul()
    BZMock(second._user)

    first.prepare()
    second.prepare()

    for tick in range(10):
        sample = [{"ts": tick, "source": "local", "cpu": float(tick) / 1000 * 100,
                   "mem": 2, "bytes-recv": 100, "other": 0}]
        first.monitoring_data(sample)
        second.monitoring_data(sample)
示例5: test_check
# Required import: from bzt.modules.blazemeter import BlazeMeterUploader
# Method under test: BlazeMeterUploader.monitoring_data
def test_check(self):
    """Full uploader lifecycle against URL-level HTTP mocks.

    Registers GET/POST responses keyed by exact URL (including a first
    monitoring push that fails with IOError to exercise retry handling),
    then drives prepare/startup/check/shutdown/post_process. A session
    response with statusCode 140 must make check() raise KeyboardInterrupt,
    and exactly 20 HTTP requests must have been issued overall.
    """
    api_mock = BZMock()
    api_mock.mock_get.update({
        'https://a.blazemeter.com/api/v4/tests?workspaceId=1&name=Taurus+Test': {"result": []},
        'https://a.blazemeter.com/api/v4/tests?projectId=1&name=Taurus+Test': {"result": []},
        'https://a.blazemeter.com/api/v4/projects?workspaceId=1&name=Proj+name': {"result": []},
    })
    api_mock.mock_post.update({
        'https://a.blazemeter.com/api/v4/projects': {"result": {
            "id": 1,
            "name": "boo",
            "userId": 2,
            "description": None,
            "created": time.time(),
            "updated": time.time(),
            "organizationId": None
        }},
        'https://a.blazemeter.com/api/v4/tests': {"result": {'id': 1}},
        'https://a.blazemeter.com/api/v4/tests/1/start-external': {"result": {
            'session': {'id': 1, 'userId': 1, 'testId': 1},
            'master': {'id': 1, 'userId': 1},
            'signature': 'sign'}},
        'https://data.blazemeter.com/submit.php?session_id=1&signature=sign&test_id=1&user_id=1&pq=0&target=labels_bulk&update=1': [
            {},
            {"result": {'session': {"statusCode": 140, 'status': 'ENDED'}}},
            {},
        ],
        'https://data.blazemeter.com/api/v4/image/1/files?signature=sign': [
            IOError("monitoring push expected fail"),
            {"result": True},
            {"result": True},
            {"result": True},
            {"result": True},
            {"result": True},
            {"result": True},
            {"result": True},
            {"result": True},
        ],
        'https://a.blazemeter.com/api/v4/sessions/1/stop': {},
        'https://data.blazemeter.com/submit.php?session_id=1&signature=sign&test_id=1&user_id=1&pq=0&target=engine_health&update=1':
            {"result": {'session': {}}}
    })

    uploader = BlazeMeterUploader()
    uploader.parameters['project'] = 'Proj name'
    uploader.settings['token'] = '123'
    uploader.settings['browser-open'] = 'none'
    uploader.engine = EngineEmul()
    shutil.copy(__file__, os.path.join(uploader.engine.artifacts_dir, os.path.basename(__file__)))
    api_mock.apply(uploader._user)
    uploader._user.timeout = 0.1

    uploader.prepare()
    uploader.startup()
    for tick in range(0, 31):
        uploader.aggregated_second(random_datapoint(tick))
    samples = [{"ts": 1, "source": "local", "cpu": 1, "mem": 2, "bytes-recv": 100, "other": 0}]
    uploader.monitoring_data(samples)
    uploader.check()
    for tick in range(32, 65):
        uploader.aggregated_second(random_datapoint(tick))
    uploader.last_dispatch = time.time() - 2 * uploader.send_interval
    self.assertRaises(KeyboardInterrupt, uploader.check)
    uploader.aggregated_second(random_datapoint(10))
    uploader.shutdown()

    log_file = uploader.engine.create_artifact('log', '.tmp')
    handler = logging.FileHandler(log_file)
    uploader.engine.log.parent.addHandler(handler)
    uploader.engine.config.get('modules').get('shellexec').get('env')['TAURUS_INDEX_ALL'] = 1
    uploader.post_process()
    self.assertEqual(20, len(api_mock.requests))
    uploader.engine.log.parent.removeHandler(handler)
示例6: test_check
# Required import: from bzt.modules.blazemeter import BlazeMeterUploader
# Method under test: BlazeMeterUploader.monitoring_data
def test_check(self):
    """Uploader lifecycle with custom metrics/tables against an emulated client.

    Queues every API response the uploader will consume in order, including
    deliberate IOErrors on the first monitoring and custom-metric pushes to
    exercise retry handling. A session response with statusCode 140 must make
    check() raise KeyboardInterrupt, and by the end every queued response
    must have been consumed.
    """
    client = BlazeMeterClientEmul(logging.getLogger(''))
    client.timeout = 1
    client.results.append({"marker": "ping", 'result': {}})
    client.results.append({"marker": "projects", 'result': []})
    client.results.append({"marker": "project-create", 'result': {
        "id": time.time(),
        "name": "boo",
        "userId": time.time(),
        "description": None,
        "created": time.time(),
        "updated": time.time(),
        "organizationId": None
    }})
    client.results.append({"marker": "tests", 'result': {}})
    client.results.append({"marker": "test-create", 'result': {'id': 'unittest1'}})
    client.results.append(
        {"marker": "sess-start",
         "result": {
             'session': {'id': 'sess1', 'userId': 1},
             'master': {'id': 'master1', 'userId': 1},
             'signature': ''}})
    client.results.append({"marker": "first push", 'result': {'session': {}}})
    client.results.append(IOError("monitoring push expected fail"))
    client.results.append({"marker": "mon push", "result": True})
    client.results.append(IOError("custom metric push expected fail"))
    client.results.append({"marker": "custom metrics push", "result": True})
    client.results.append({"marker": "second push", 'result': {'session': {"statusCode": 140, 'status': 'ENDED'}}})
    client.results.append({"marker": "post-proc push", 'result': {'session': {}}})
    client.results.append({"marker": "post process monitoring push", "result": True})
    client.results.append({"marker": "post process custom metrics push", "result": True})
    client.results.append({"marker": "artifacts push", 'result': True})
    client.results.append({"marker": "logs push", 'result': True})
    client.results.append({"marker": "terminate", 'result': {'session': {}}})
    obj = BlazeMeterUploader()
    obj.parameters['project'] = 'Proj name'
    obj.settings['token'] = '123'
    obj.settings['browser-open'] = 'none'
    obj.settings['send-custom-metrics'] = True
    obj.settings['send-custom-tables'] = True
    obj.engine = EngineEmul()
    # FIX: the original concatenated artifacts_dir and the basename without a
    # path separator; use os.path.join as the URL-mocked test_check does.
    shutil.copy(__file__, os.path.join(obj.engine.artifacts_dir, os.path.basename(__file__)))
    obj.client = client
    obj.prepare()
    obj.startup()
    for x in range(0, 31):
        obj.aggregated_second(random_datapoint(x))
    mon = [{"ts": 1, "source": "local", "cpu": 1, "mem": 2, "bytes-recv": 100, "other": 0},
           {"ts": 1, "source": "chrome", "memory": 32, "cpu": 23}]
    obj.monitoring_data(mon)
    obj.check()
    for x in range(32, 65):
        obj.aggregated_second(random_datapoint(x))
    self.assertRaises(KeyboardInterrupt, obj.check)
    obj.aggregated_second(random_datapoint(10))
    obj.shutdown()
    log_file = obj.engine.create_artifact('log', '.tmp')
    obj.engine.log.parent.handlers.append(logging.FileHandler(log_file))
    obj.engine.config.get('modules').get('shellexec').get('env')['TAURUS_INDEX_ALL'] = 1
    obj.post_process()
    self.assertEqual(0, len(client.results))