I am planning to performance-test my web server using Locust. The problem with the code below is that it only gives me the average response times.
My basic code is as follows
from locust import HttpLocust, TaskSet, task, events, web
def index(l):
    """Load-test task: fetch the site root page."""
    http = l.client
    http.get("/")
def stats(l):
    """Load-test task: hit Locust's own request-statistics endpoint."""
    http = l.client
    http.get("/stats/requests")
class MyTaskSet(TaskSet):
    # Task mix for each simulated user: Locust picks randomly (uniform
    # weighting) between the two module-level task functions defined above.
    tasks = [index,stats]
class MyLocust(HttpLocust):
    """Simulated user that records per-request timings to a CSV on shutdown.

    Subscribes to Locust's request_success / request_failure / quitting
    events so every individual response time is captured (not just the
    aggregated averages Locust shows by default).
    """

    host = "http://127.0.0.1:8089"
    min_wait = 2000  # milliseconds between tasks (lower bound)
    max_wait = 5000  # milliseconds between tasks (upper bound)
    task_set = MyTaskSet

    def __init__(self):
        super(MyLocust, self).__init__()
        # Per-instance lists: class-level mutable lists would be shared by
        # every simulated user, and the original [list()] seed wrote a
        # spurious empty row into the CSV.
        self.request_success_stats = []
        self.request_fail_stats = []
        # BUG FIX: the original referenced `locust.events` but only
        # `events` was imported (no `import locust`), raising NameError.
        events.request_success += self.hook_request_success
        events.request_failure += self.hook_request_fail
        events.quitting += self.hook_locust_quit

    def hook_request_success(self, request_type, name, response_time, response_length):
        # One row per successful request: [name, method, ms, bytes].
        self.request_success_stats.append([name, request_type, response_time, response_length])

    def hook_request_fail(self, request_type, name, response_time, exception):
        # One row per failed request: [name, method, ms, exception].
        self.request_fail_stats.append([name, request_type, response_time, exception])

    def hook_locust_quit(self):
        # Flush collected timings when the Locust process shuts down.
        self.save_success_stats()

    def save_success_stats(self):
        """Write every recorded success row to success_req_stats.csv."""
        import csv
        # Python 3: csv needs a text-mode file with newline='' —
        # opening in 'wb' raises TypeError when the writer emits str.
        with open('success_req_stats.csv', 'w', newline='') as csv_file:
            writer = csv.writer(csv_file)
            writer.writerows(self.request_success_stats)
This records the actual per-request response times.
The catch is that if the payload contains multiple URIs, a response time is recorded for each of them, so the results have to be sorted and deduplicated when they are written out.
If you found this helpful, you can donate to us via PayPal or buy me a coffee so we can maintain and grow. Thank you!
Donate to us with: