Here are examples of the Python API `tests.mocks.EngineEmul`, taken from open-source projects. By voting an example up you can indicate which examples are most useful and appropriate.
117 Examples
3
Example 1
Project: taurus Source File: test_pbench.py
def _get_pbench(self):
    """Build a PBenchExecutor wired to an emulated engine for tests."""
    executor = PBenchExecutor()
    executor.engine = EngineEmul()
    executor.settings = BetterDict()
    executor.engine.config = BetterDict()
    return executor
3
Example 2
Project: taurus Source File: test_Siege.py
def test_check_install_exceptions(self):
    """prepare() must raise ToolError when the siege tool path is bogus."""
    executor = SiegeExecutor()
    executor.engine = EngineEmul()
    executor.settings.merge({"path": '*'})
    executor.execution.merge({"concurrency": 2, "hold-for": '2s', "scenario": {}})
    self.assertRaises(ToolError, executor.prepare)
3
Example 3
Project: taurus Source File: test_passFailStatus.py
def test_prepare3(self):
    """A 'continue as failed' criterion must trip and make post_process()
    raise AutomatedShutdown.

    Uses assertRaises instead of the manual try/self.fail()/except pattern.
    """
    obj = PassFailStatus()
    obj.engine = EngineEmul()
    obj.parameters = {"criteria": ["avg-rt>10ms for 3s, continue as failed"]}
    obj.prepare()
    self.assertGreater(len(obj.criteria), 0)
    for n in range(0, 10):
        point = random_datapoint(n)
        # pin avg-rt to a constant so the criterion accumulates consistently
        # (presumably 1 is above the 10ms threshold — confirm units)
        point[DataPoint.CURRENT][''][KPISet.AVG_RESP_TIME] = 1
        obj.aggregated_second(point)
        obj.check()
    self.assertRaises(AutomatedShutdown, obj.post_process)
3
Example 4
Project: taurus Source File: test_pbench.py
def test_install_pbench(self):
    """prepare() must fail fast with ToolError when the tool binary is missing.

    Uses the assertRaises context manager (covers the missing-exception case)
    and assertEqual instead of the deprecated assertEquals alias.
    """
    obj = PBenchExecutor()
    obj.engine = EngineEmul()
    obj.settings = BetterDict()
    obj.engine.config = BetterDict()
    obj.settings.merge({"path": "/notexistent"})
    with self.assertRaises(ToolError) as context:
        obj.prepare()
    self.assertEqual("Please install PBench tool manually", str(context.exception))
3
Example 5
Project: taurus Source File: test_passFailStatus.py
def test_passfail_crash(self):
    """prepare() must normalize string criteria into dict configs in-place."""
    passfail = BetterDict()
    passfail.merge({
        "module": "passfail",
        "criteria": [
            "fail>10% within 5s",
        ]
    })
    obj = PassFailStatus()
    obj.engine = EngineEmul()
    obj.parameters = passfail
    obj.engine.config.merge({
        "services": [passfail],
    })
    obj.prepare()
    # the generator variable used to be named 'obj', shadowing the status
    # object above — renamed to keep the outer reference unambiguous
    self.assertTrue(all(isinstance(crit, dict) for crit in passfail["criteria"]))
    # NOTE(review): "criterias" appears to be a legacy spelling of the key,
    # presumably kept for backward compatibility — verify against PassFailStatus
    self.assertTrue(all(isinstance(crit, dict) for crit in passfail["criterias"]))
3
Example 6
Project: taurus Source File: test_JUnitXMLReporter.py
def test_results_link_cloud(self):
    """Cloud provisioning report info should surface the client results URL."""
    reporter = JUnitXMLReporter()
    reporter.engine = EngineEmul()
    provisioning = CloudProvisioning()
    provisioning.client = BlazeMeterClientEmul(reporter.log)
    provisioning.client.results_url = 'url1'
    provisioning.settings.merge({'test': 'test1'})
    reporter.engine.provisioning = provisioning
    report_info = reporter.get_bza_report_info()
    self.assertEqual(report_info, [('Cloud report link: url1\n', 'test1')])
3
Example 7
Project: taurus Source File: test_services.py
def test_no_work_prov(self):
    """A service should only run when its 'run-at' matches the provisioning."""
    service = Service()
    service.engine = EngineEmul()
    service.engine.config[Provisioning.PROV] = 'cloud'
    self.assertFalse(service.should_run())
    service.parameters['run-at'] = 'cloud'
    self.assertTrue(service.should_run())
3
Example 8
Project: taurus Source File: test_FinalStatusReporter.py
def test_func_report(self):
    """Functional report should list totals plus per-test names and stacktraces.

    The recording handler is now removed in a finally block: the original
    leaked it onto the logger when any assertion failed, polluting later tests.
    """
    obj = FinalStatus()
    obj.engine = EngineEmul()
    obj.parameters = BetterDict()
    log_recorder = RecordingHandler()
    obj.log.addHandler(log_recorder)
    try:
        obj.prepare()
        obj.aggregated_results(*self.__get_func_tree())
        obj.post_process()
        info_log = log_recorder.info_buff.getvalue()
        self.assertIn("Total: 3 tests", info_log)
        self.assertIn("Test TestClass.case2", info_log)
        self.assertIn("stacktrace2", info_log)
        self.assertIn("Test TestClass.case3", info_log)
        self.assertIn("stacktrace3", info_log)
    finally:
        obj.log.removeHandler(log_recorder)
3
Example 9
def setUp(self):
    """Prepare CLI options and an emulated engine before each test."""
    super(TestCLI, self).setUp()
    base_dir = os.path.dirname(__file__)
    self.log = os.path.join(base_dir, "..", "build", "bzt.log")
    self.verbose = True
    self.no_system_configs = True
    self.option = []
    self.datadir = os.path.join(base_dir, "..", "build", "acli")
    # CLI(self) reads the option attributes above, so it is built last
    self.obj = CLI(self)
    self.aliases = []
    self.obj.engine = EngineEmul()
3
Example 10
Project: taurus Source File: test_Gatling.py
def getGatling(self):
    """Return a GatlingExecutor pointed at the bundled gatling tool stub."""
    tool_path = os.path.abspath(__dir__() + "/../gatling/gatling" + EXE_SUFFIX)
    executor = GatlingExecutor()
    executor.engine = EngineEmul()
    executor.settings.merge({"path": tool_path})
    return executor
3
Example 11
Project: taurus Source File: test_pbench.py
def test_schedule_concurrency_steps(self):
    """Stepped ramp-up should still emit the unlimited slot and payload loop."""
    executor = PBenchExecutor()
    executor.engine = EngineEmul()
    executor.execution.merge({"concurrency": 5, "ramp-up": 10, "steps": 3})
    payload = io.BytesIO(b("5 test1\ntest1\n5 test2\ntest2\n"))
    scheduler = Scheduler(executor.get_load(), payload, logging.getLogger(""))
    items = list(scheduler.generate())
    self.assertEqual(8, len(items))
    self.assertEqual(-1, items[5][0])  # instance became unlimited
    self.assertEqual(Scheduler.REC_TYPE_LOOP_START, items[6][5])  # looped payload
3
Example 12
Project: taurus Source File: test_passFailStatus.py
def test_cuemulative_criteria_post_process(self):
    """Cumulative 'continue as failed' criteria must all trigger and make
    post_process() raise AutomatedShutdown.

    Replaces deprecated assertEquals with assertEqual.
    """
    obj = PassFailStatus()
    obj.engine = EngineEmul()
    obj.parameters = {"criteria": [
        "p90>0ms, continue as failed",
        "avg-rt>0ms, continue as failed",
    ]}
    obj.prepare()
    self.assertEqual(len(obj.criteria), 2)
    for n in range(0, 10):
        point = random_datapoint(n)
        obj.aggregated_second(point)
        obj.check()
    obj.shutdown()
    self.assertRaises(AutomatedShutdown, obj.post_process)
    for crit in obj.criteria:
        self.assertTrue(crit.is_triggered)
3
Example 13
Project: taurus Source File: test_Grinder.py
def test_with_results(self):
    """Provisioning post_process() should succeed when the reader has data."""
    executor = GrinderExecutor()
    executor.engine = EngineEmul()
    executor.settings.merge({'path': __dir__() + "/../grinder/fake_grinder.jar"})
    executor.execution.merge({
        "concurrency": {"local": 2},
        "scenario": {"script": __dir__() + "/../grinder/helloworld.py"},
    })
    executor.prepare()
    executor.engine.prepared = [executor]
    executor.engine.started = [executor]
    provisioning = Local()
    provisioning.engine = executor.engine
    provisioning.executors = [executor]
    executor.engine.provisioning = provisioning
    executor.reader.buffer = ['some info']
    executor.engine.provisioning.post_process()
3
Example 14
Project: taurus Source File: test_pbench.py
def test_pbench_script(self):
    """prepare() should accept a script-based pbench scenario."""
    executor = PBenchExecutor()
    executor.engine = EngineEmul()
    executor.settings = BetterDict()
    executor.engine.config = BetterDict()
    executor.engine.config.merge({
        ScenarioExecutor.EXEC: {
            "executor": "pbench",
            "scenario": {"script": __dir__() + "/../data/pbench.src"},
        },
        "provisioning": "test",
    })
    executor.execution = executor.engine.config['execution']
    tool = os.path.join(os.path.dirname(__file__), '..', "phantom.sh")
    executor.settings.merge({"path": tool})
    executor.prepare()
3
Example 15
Project: taurus Source File: test_FinalStatusReporter.py
def test_log_messages_duration(self):
    """Duration report should be rendered as 'days, H:MM:SS'."""
    status = FinalStatus()
    status.engine = EngineEmul()
    status.parameters = BetterDict()
    recorder = RecordingHandler()
    status.log.addHandler(recorder)
    status.prepare()
    status.start_time -= 120005  # 120005 s == 1 day, 9:20:05
    status.post_process()
    self.assertEqual("Test duration: 1 day, 9:20:05\n", recorder.info_buff.getvalue())
    status.log.removeHandler(recorder)
3
Example 16
Project: taurus Source File: test_proxy2jmx.py
def test_existing_proxy(self):
    """prepare() should reuse an already-active recording proxy."""
    service = Proxy2JMXEmul()
    service.api_delay = 1
    service.responses = [
        ResponseEmul(200, '{"result" : {"port": "port1", "host": "host1", "status": "active"}}'),
        ResponseEmul(200, ''),  # stopRecording
        ResponseEmul(200, ''),  # clearRecording
    ]
    service.engine = EngineEmul()
    service.engine.config.merge({'modules': {'recorder': {'token': '123'}}})
    service.settings = service.engine.config.get('modules').get('recorder')
    service.prepare()
    self.assertEqual(service.proxy, 'http://host1:port1')
3
Example 17
Project: taurus Source File: test_monitoring.py
def test_local_with_engine(self):
    """Local monitoring samples must all carry 'source' and 'ts' fields.

    Membership testing works directly on a dict; building `.keys()` first
    is redundant.
    """
    config = {'metrics': ['cpu', 'engine-loop']}
    obj = LocalClient(logging.getLogger(''), 'label', config)
    obj.engine = EngineEmul()
    obj.connect()
    data = obj.get_data()
    self.assertTrue(all('source' in item and 'ts' in item for item in data))
    return data
3
Example 18
def test_iter(self):
    """Siege executor should prepare and start with an iterations scenario."""
    executor = SiegeExecutor()
    executor.engine = EngineEmul()
    executor.settings.merge({"path": get_res_path(TOOL_NAME)})
    executor.execution.merge({
        "concurrency": 2,
        "iterations": 3,
        "scenario": {
            "think-time": "1s",
            "requests": ["http://blazedemo.com", "http://ya.ru"],
        },
    })
    executor.prepare()
    executor.get_widget()
    executor.startup()
3
Example 19
Project: taurus Source File: test_ApacheBenchmark.py
def test_no_request_exception(self):
    """startup() must raise TaurusConfigError when the scenario has no requests."""
    executor = ApacheBenchmarkExecutor()
    executor.engine = EngineEmul()
    executor.settings.merge({"path": get_res_path(TOOL_NAME)})
    executor.execution.merge({"scenario": {}})
    executor.prepare()
    self.assertRaises(TaurusConfigError, executor.startup)
3
Example 20
Project: taurus Source File: test_Tsung.py
def setUp(self):
    """Create a TsungExecutor bound to an emulated engine and tool stub."""
    self.obj = TsungExecutor()
    self.obj.engine = EngineEmul()
    self.obj.settings = BetterDict()
    self.obj.settings.merge({"path": get_res_path(TOOL_NAME)})
    self.obj.execution = BetterDict()
3
Example 21
Project: taurus Source File: test_passFailStatus.py
def test_named_criteria(self):
    """A dict-form criterion should use its key as the criterion message.

    Replaces deprecated assertEquals with assertEqual.
    """
    obj = PassFailStatus()
    obj.engine = EngineEmul()
    obj.parameters = {"criteria": {"named criterion": "avg-rt of spaced label>10ms"}}
    obj.prepare()
    self.assertGreater(len(obj.criteria), 0)
    self.assertEqual(obj.criteria[0].message, "named criterion")
3
Example 22
Project: taurus Source File: test_passFailStatus.py
def test_ashort_data(self):
    """A 100% failure cumulative datapoint must trip the stop-as-failed criterion."""
    status = PassFailStatus()
    status.engine = EngineEmul()
    crit_cfg = DataCriterion.string_to_config("failures>0%, stop as failed")
    status.criteria.append(DataCriterion(crit_cfg, status))
    samples = 100 * 16
    point = DataPoint(0)
    point[DataPoint.CUMULATIVE] = {'': {KPISet.FAILURES: samples, KPISet.SAMPLE_COUNT: samples}}
    # mirrors the original call order: datapoint arrives after shutdown
    status.check()
    status.shutdown()
    status.aggregated_second(point)
    self.assertRaises(AutomatedShutdown, status.post_process)
3
Example 23
Project: taurus Source File: test_blazeMeterUploader.py
def test_monitoring_buffer_limit_option(self):
    """monitoring-buffer-limit must cap every per-source buffer length.

    The original loop bound 'buffer' (shadowing the builtin) and never used
    the source key — iterate the values directly instead.
    """
    obj = BlazeMeterUploader()
    obj.engine = EngineEmul()
    obj.client = BlazeMeterClientEmul(logging.getLogger(''))
    obj.client.results.append({"marker": "ping", 'result': {}})
    obj.settings["monitoring-buffer-limit"] = 100
    obj.prepare()
    for i in range(1000):
        mon = [{"ts": i, "source": "local", "cpu": float(i) / 1000 * 100, "mem": 2, "bytes-recv": 100, "other": 0}]
        obj.monitoring_data(mon)
    for per_source_buff in obj.monitoring_buffer.data.values():
        self.assertLessEqual(len(per_source_buff), 100)
    self.assertEqual(0, len(obj.client.results))
3
Example 24
Project: taurus Source File: test_passFailStatus.py
def test_percentiles_track(self):
    """A p90>0ms criterion must trigger and fail the run at post_process().

    Uses assertRaises instead of the manual try/self.fail()/except pattern.
    """
    obj = PassFailStatus()
    obj.engine = EngineEmul()
    obj.parameters = {"criteria": ["p90>0ms"]}
    obj.prepare()
    self.assertGreater(len(obj.criteria), 0)
    for n in range(0, 10):
        obj.aggregated_second(random_datapoint(n))
        obj.check()
    obj.shutdown()
    self.assertRaises(AutomatedShutdown, obj.post_process)
3
Example 25
Project: taurus Source File: test_Grinder.py
def test_grinder_widget(self):
    """The console widget should display the scenario script name."""
    executor = GrinderExecutor()
    executor.engine = EngineEmul()
    executor.settings.merge({'path': __dir__() + "/../grinder/fake_grinder.jar"})
    executor.engine.config.merge({"provisioning": 'local'})
    executor.execution.merge({
        "concurrency": {"local": 2},
        "ramp-up": 2,
        "hold-for": 2,
        "scenario": {"script": __dir__() + "/../grinder/helloworld.py"},
    })
    executor.prepare()
    executor.get_widget()
    self.assertEqual(executor.widget.widgets[0].text, "Script: helloworld.py")
3
Example 26
Project: taurus Source File: test_pbench.py
def test_schedule_concurrency(self):
    """Ramping to five users should loop the two-record payload."""
    executor = PBenchExecutor()
    executor.engine = EngineEmul()
    executor.execution.merge({"concurrency": 5, "ramp-up": 10, "hold-for": 5})
    payload = io.BytesIO(b("5 test1\ntest1\n5 test2\ntest2\n"))
    scheduler = Scheduler(executor.get_load(), payload, logging.getLogger(""))
    items = list(scheduler.generate())
    self.assertEqual(8, len(items))
    self.assertEqual(-1, items[5][0])  # instance became unlimited
    self.assertEqual(Scheduler.REC_TYPE_LOOP_START, items[6][5])  # looped payload
3
Example 27
Project: taurus Source File: test_pbench.py
def test_schedule_with_no_rampup(self):
    """Constructing a Scheduler with ramp-up=None must not raise."""
    executor = PBenchExecutor()
    executor.engine = EngineEmul()
    executor.execution.merge({"concurrency": 10, "ramp-up": None, "steps": 3, "hold-for": 10})
    # constructing the scheduler is the assertion: it must not throw
    Scheduler(executor.get_load(), io.BytesIO(b("4 test\ntest\n")), logging.getLogger(""))
3
Example 28
Project: taurus Source File: test_Grinder.py
def test_fail_on_zero_results(self):
    """post_process() must raise ToolError when the run produced no results."""
    executor = GrinderExecutor()
    executor.engine = EngineEmul()
    executor.settings.merge({'path': __dir__() + "/../grinder/fake_grinder.jar"})
    executor.execution.merge({
        "concurrency": {"local": 2},
        "scenario": {"script": __dir__() + "/../grinder/helloworld.py"},
    })
    executor.prepare()
    engine = executor.engine
    engine.prepared = [executor]
    engine.started = [executor]
    engine.provisioning = Local()
    engine.provisioning.engine = engine
    engine.provisioning.executors = [executor]
    self.assertRaises(ToolError, engine.provisioning.post_process)
3
Example 29
Project: taurus Source File: test_pbench.py
def test_same_address_port(self):
    """prepare() must reject configs where requests share address and port.

    The original leaked the YAML file handle via yaml.load(open(...).read());
    a with-statement closes it deterministically.
    """
    obj = PBenchExecutor()
    obj.engine = EngineEmul()
    obj.settings = BetterDict()
    obj.engine.config = BetterDict()
    with open(__dir__() + "/../yaml/phantom_request_same_address.yml") as config_file:
        # NOTE(review): yaml.load without an explicit Loader is unsafe on
        # untrusted input; acceptable here for a local test fixture
        obj.engine.config.merge(yaml.load(config_file.read()))
    obj.execution = obj.engine.config['execution'][0]
    obj.settings.merge({
        "path": os.path.join(os.path.dirname(__file__), '..', "phantom.sh"),
    })
    self.assertRaises(TaurusConfigError, obj.prepare)
3
Example 30
Project: taurus Source File: test_FinalStatusReporter.py
def test_log_messages_samples_count(self):
    """Summary-only mode should log total samples plus failure percentage."""
    status = FinalStatus()
    status.engine = EngineEmul()
    status.parameters = BetterDict()
    recorder = RecordingHandler()
    status.log.addHandler(recorder)
    status.parameters.merge({"failed-labels": False, "percentiles": False, "summary": True, "test-duration": False})
    status.aggregated_second(self.__get_datapoint())
    status.post_process()
    self.assertEqual("Samples count: 59314, 50.00% failures\n", recorder.info_buff.getvalue())
    status.log.removeHandler(recorder)
3
Example 31
Project: taurus Source File: test_pbench.py
def test_pbench_file_lister(self):
    """resource_files() should report the scenario script by basename."""
    executor = PBenchExecutor()
    executor.engine = EngineEmul()
    executor.settings = BetterDict()
    executor.engine.config = BetterDict()
    executor.engine.config.merge(
        {ScenarioExecutor.EXEC: {"executor": "pbench", "scenario": {"script": "/opt/data/script.src"}}})
    executor.execution = executor.engine.config['execution']
    tool = os.path.join(os.path.dirname(__file__), '..', "phantom.sh")
    executor.settings.merge({"path": tool})
    resource_files = executor.resource_files()
    self.assertEqual(1, len(resource_files))
    self.assertEqual(resource_files[0], 'script.src')
3
Example 32
Project: taurus Source File: test_Grinder.py
def test_requests(self):
    """prepare() should accept a plain requests-list scenario."""
    executor = GrinderExecutor()
    executor.engine = EngineEmul()
    executor.settings.merge({'path': __dir__() + "/../grinder/fake_grinder.jar"})
    executor.execution.merge({"scenario": {"requests": ['http://blazedemo.com']}})
    executor.prepare()
3
Example 33
Project: taurus Source File: test_pbench.py
def test_pbench_payload_py3_crash(self):
    """Generating a payload from many requests must not crash under Python 3."""
    executor = PBenchExecutor()
    executor.engine = EngineEmul()
    executor.settings = BetterDict()
    executor.engine.config = BetterDict()
    requests = ["test%d" % i for i in range(20)]
    executor.engine.config.merge({
        ScenarioExecutor.EXEC: {
            "executor": "pbench",
            "scenario": {"requests": requests},
        },
        "provisioning": "test",
    })
    executor.execution = executor.engine.config['execution']
    tool = os.path.join(os.path.dirname(__file__), '..', "phantom.sh")
    executor.settings.merge({"path": tool})
    executor.prepare()
3
Example 34
Project: taurus Source File: test_ApacheBenchmark.py
def test_no_apache_benchmark(self):
    """prepare() must raise ToolError when the ab binary path is invalid."""
    executor = ApacheBenchmarkExecutor()
    executor.engine = EngineEmul()
    executor.settings.merge({"path": '*'})
    executor.execution.merge({"scenario": {"requests": ["http://blazedemo.com"]}})
    self.assertRaises(ToolError, executor.prepare)
3
Example 35
def test_no_token(self):
    """prepare() must fail with TaurusConfigError when no API token is configured."""
    service = Proxy2JMXEmul()
    service.engine = EngineEmul()
    service.engine.config.merge({})
    service.settings = service.engine.config.get('recorder')
    self.assertRaises(TaurusConfigError, service.prepare)
3
Example 36
Project: taurus Source File: test_JUnitXMLReporter.py
def test_results_link_blazemeter(self):
    """Report info should surface the BlazeMeter uploader's results URL."""
    reporter = JUnitXMLReporter()
    reporter.engine = EngineEmul()
    reporter.engine.provisioning = Local()
    uploader = BlazeMeterUploader()
    reporter.engine.reporters.append(uploader)
    reporter.engine.provisioning.client = BlazeMeterClientEmul(reporter.log)
    uploader.client.results_url = 'url2'
    uploader.parameters.merge({'test': 'test2'})
    report_info = reporter.get_bza_report_info()
    self.assertEqual(report_info, [('BlazeMeter report link: url2\n', 'test2')])
3
Example 37
def setUp(self):
    """Configure a SeleniumExecutor against a fully-configured emulated engine."""
    super(SeleniumTestCase, self).setUp()
    engine = EngineEmul()
    paths = [__dir__() + "/../../bzt/10-base.json", local_paths_config()]
    engine.configure(paths)  # FIXME: avoid using whole engine in particular module test!
    self.obj = SeleniumExecutor()
    self.obj.settings = engine.config.get("modules").get("selenium")
    self.obj.settings.merge({"virtual-display": {"width": 1024, "height": 768}})
    engine.create_artifacts_dir(paths)
    self.obj.engine = engine
3
Example 38
Project: taurus Source File: test_FinalStatusReporter.py
def test_dump(self):
    """dump-xml/dump-csv parameters should produce artifacts and log 'XML'."""
    status = FinalStatus()
    status.engine = EngineEmul()
    status.parameters = BetterDict()
    recorder = RecordingHandler()
    status.log.addHandler(recorder)
    status.parameters.merge({
        "dump-xml": status.engine.create_artifact("status", ".xml"),
        "dump-csv": status.engine.create_artifact("status", ".csv"),
    })
    status.aggregated_second(random_datapoint(time.time()))
    status.post_process()
    self.assertIn("XML", recorder.info_buff.getvalue())
3
Example 39
Project: taurus Source File: test_shellexec.py
def setUp(self):
    """Create a ShellExecutor with local provisioning and a log recorder."""
    self.obj = ShellExecutor()
    self.obj.parameters = BetterDict()
    self.obj.engine = EngineEmul()
    self.obj.engine.config.merge({"provisioning": "local"})
    self.obj.engine.default_cwd = os.getcwd()
    # capture log output so individual tests can assert on messages
    self.log_recorder = RecordingHandler()
    self.obj.log.addHandler(self.log_recorder)
3
Example 40
Project: taurus Source File: test_passFailStatus.py
def test_prepare2(self):
    """'continue as non-failed' criteria must not fail the run at post_process()."""
    obj = PassFailStatus()
    obj.engine = EngineEmul()
    obj.parameters = {"criteria": ["avg-rt>10ms, continue as non-failed"]}
    obj.prepare()
    self.assertGreater(len(obj.criteria), 0)
    for second in range(10):
        obj.aggregated_second(random_datapoint(second))
        obj.check()
    obj.post_process()
3
Example 41
def test_url_exceptions(self):
    """prepare() must raise TaurusConfigError for a scenario without URLs."""
    executor = SiegeExecutor()
    executor.engine = EngineEmul()
    executor.settings.merge({"path": get_res_path(TOOL_NAME)})
    executor.execution.merge({"concurrency": 2, "hold-for": '2s', "scenario": {}})
    self.assertRaises(TaurusConfigError, executor.prepare)
3
Example 42
Project: taurus Source File: test_passFailStatus.py
def test_widget(self):
    """The widget text should show the criterion as failed after 10 seconds."""
    obj = PassFailStatus()
    obj.engine = EngineEmul()
    obj.parameters = {"criteria": ["avg-rt>10ms for 2s, continue as failed"]}
    obj.prepare()
    obj.get_widget()
    timestamp = time.time()
    for _ in range(10):
        point = random_datapoint(timestamp)
        point[DataPoint.CURRENT]['']["avg_rt"] = 1.0
        obj.aggregated_second(point)
        obj.check()
        timestamp += 1
    self.assertEqual(obj.widget.text_widget.text, "Failed: avg-rt>10ms for 10 sec\n")
3
Example 43
Project: taurus Source File: test_Siege.py
def test_repetition_exceptions(self):
    """Without iterations or hold-for, startup() must raise TaurusConfigError."""
    executor = SiegeExecutor()
    executor.engine = EngineEmul()
    executor.settings.merge({"path": get_res_path(TOOL_NAME)})
    executor.execution.merge({
        "concurrency": 2,
        "scenario": {"requests": ["http://blazedemo.com", "http://ya.ru"]},
    })
    executor.prepare()
    self.assertEqual(len(executor.resource_files()), 0)
    self.assertRaises(TaurusConfigError, executor.startup)
3
Example 44
Project: taurus Source File: test_FinalStatusReporter.py
def test_func_report_all_no_stacktrace(self):
    """With report-tests=all and print-stacktrace=False, every test is listed
    but stacktraces are omitted.

    The recording handler is now removed in a finally block: the original
    leaked it onto the logger when any assertion failed, polluting later tests.
    """
    obj = FinalStatus()
    obj.engine = EngineEmul()
    obj.parameters = BetterDict()
    log_recorder = RecordingHandler()
    obj.log.addHandler(log_recorder)
    try:
        obj.parameters.merge({"report-tests": "all", "print-stacktrace": False})
        obj.prepare()
        obj.aggregated_results(*self.__get_func_tree())
        obj.post_process()
        info_log = log_recorder.info_buff.getvalue()
        self.assertIn("Total: 3 tests", info_log)
        self.assertIn("Test TestClass.case1 - PASSED", info_log)
        self.assertIn("Test TestClass.case2 - FAILED", info_log)
        self.assertIn("Test TestClass.case3 - BROKEN", info_log)
        self.assertNotIn("stacktrace2", info_log)
        self.assertNotIn("stacktrace3", info_log)
    finally:
        obj.log.removeHandler(log_recorder)
3
Example 45
Project: taurus Source File: test_FinalStatusReporter.py
def test_log_messages_failed_labels(self):
    """failed-labels mode should log per-label failed sample counts."""
    status = FinalStatus()
    status.engine = EngineEmul()
    status.parameters = BetterDict()
    recorder = RecordingHandler()
    status.log.addHandler(recorder)
    status.parameters.merge({"failed-labels": True, "percentiles": False, "summary": False, "test-duration": False})
    status.aggregated_second(self.__get_datapoint())
    status.post_process()
    self.assertIn("29656 failed samples: http://192.168.1.1/anotherquery\n", recorder.info_buff.getvalue())
    status.log.removeHandler(recorder)
3
Example 46
Project: taurus Source File: test_Grinder.py
def test_resource_files_collection_basic(self):
    """A script scenario should contribute exactly one resource file."""
    executor = GrinderExecutor()
    executor.engine = EngineEmul()
    executor.execution.merge({"scenario": {"script": __dir__() + "/../grinder/helloworld.py"}})
    self.assertEqual(len(executor.resource_files()), 1)
3
Example 47
Project: taurus Source File: test_pbench.py
def test_schedule_empty(self):
    """Default load (single user, single iteration) yields exactly one record."""
    executor = PBenchExecutor()
    executor.engine = EngineEmul()
    # default load: concurrency 1, iterations 1
    scheduler = Scheduler(executor.get_load(), io.BytesIO(b("4 test\ntest\n")), logging.getLogger(""))
    items = list(scheduler.generate())
    for item in items:
        logging.debug("Item: %s", item)
    self.assertEqual(1, len(items))
0
Example 48
Project: taurus Source File: test_ApacheBenchmark.py
def test_full_execution(self):
    """Run a complete ab cycle: prepare, startup, poll, shutdown, post_process.

    Replaces deprecated assertNotEquals with assertIsNotNone for the same check.
    """
    executor = ApacheBenchmarkExecutor()
    executor.engine = EngineEmul()
    executor.settings.merge({"path": get_res_path(TOOL_NAME)})
    request = {
        "url": "http://blazedemo.com",
        "headers": [
            {"X-Answer": "42"},
        ],
        "keepalive": False,
        "method": "GET",
    }
    executor.execution.merge({
        "concurrency": 2,
        "iterations": 3,
        "headers": {"Content-Type": "text/plain"},
        "scenario": {"keepalive": True, "requests": [request]},
    })
    executor.prepare()
    executor.get_widget()
    try:
        executor.startup()
        while not executor.check():
            time.sleep(executor.engine.check_interval)
    finally:
        # always shut the tool down, even if startup/check raised
        executor.shutdown()
    executor.post_process()
    self.assertIsNotNone(executor.process)
0
Example 49
Project: taurus Source File: test_ApacheBenchmark.py
def test_iter(self):
"Ensures that executor doesn't fail with minimal configuration."
obj = ApacheBenchmarkExecutor()
obj.engine = EngineEmul()
obj.settings.merge({
"path": get_res_path(TOOL_NAME),})
obj.execution.merge({
"scenario": {
"requests": ["http://blazedemo.com"]
}
})
obj.prepare()
obj.get_widget()
try:
obj.startup()
while not obj.check():
time.sleep(obj.engine.check_interval)
finally:
obj.shutdown()
obj.post_process()
self.assertNotEquals(obj.process, None)
0
Example 50
Project: taurus Source File: test_ApacheBenchmark.py
def test_non_get_request_exception(self):
    """startup() must raise TaurusConfigError when a request uses a non-GET method."""
    executor = ApacheBenchmarkExecutor()
    executor.engine = EngineEmul()
    executor.settings.merge({"path": get_res_path(TOOL_NAME)})
    executor.execution.merge({
        "scenario": {
            "requests": [
                {"url": "http://blazedemo.com", "method": "POST"},
            ],
        },
    })
    executor.prepare()
    self.assertRaises(TaurusConfigError, executor.startup)