-
Notifications
You must be signed in to change notification settings - Fork 6
Expand file tree
/
Copy pathbench.py
More file actions
executable file
·117 lines (80 loc) · 2.68 KB
/
bench.py
File metadata and controls
executable file
·117 lines (80 loc) · 2.68 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
#!/usr/bin/env python
"""
Compares redis, pipeline, and redpipe calls.
Over localhost socket.
Network efficiency is already as good as it will get.
Try this with your redis machine.
So far what I am seeing is that redpipe doesn't seem to add any
extra overhead over using redis pipelines.
But it is much easier to work with.
This doesn't compare actual impact of using futures and variable manipulation.
But I am going to bet that for most environments, the impact of the network
will far, far outweigh any other factor.
Usage
-----
run with `py.test`:
.. code-block:: bash
py.test ./bench.py
Experimenting with pytest.benchmark plugin:
http://pytest-benchmark.readthedocs.io/
Also experimenting with toxiproxy to simulate latency:
https://github.com/Shopify/toxiproxy
toxiproxy-cli create redis -l localhost:26379 -u localhost:6379
toxiproxy-cli toxic add redis -t latency -a latency=2
toxiproxy-cli list
toxiproxy-cli toxic remove redis -n latency_downstream
toxiproxy-cli delete redis
Then you can call this with py.test ./bench.py --port 26379
"""
import redis
import redislite
import redpipe
# setup. configure here.
# need to make these cli args.
# Number of batches the benchmarks iterate over.
# NOTE(review): despite the name, this is the batch count, not the total
# key count — total keys touched = KEY_COUNT * CHUNK_SIZE; confirm intent.
KEY_COUNT = 100
# Values written and read back per batch (one pipeline flush in the
# pipelined benchmarks).
CHUNK_SIZE = 10
def build_redis(port):
    """Build a Redis client and point redpipe at it.

    :param port: TCP port of a running Redis server, or None to spin up
        an in-process redislite instance instead.
    :return: the configured client (also registered as redpipe's default
        connection).
    """
    client = (redislite.StrictRedis() if port is None
              else redis.StrictRedis(port=int(port)))
    # Drop any connections left over from a previous test, then register
    # this client as the redpipe default.
    redpipe.reset()
    redpipe.connect_redis(client)
    return client
def values_iterator(key_count=None, chunk_size=None):
    """Yield batches of distinct test payloads for the benchmarks.

    Generalized from the original hard-coded module constants: callers may
    now override the batch count and batch size; with no arguments the
    behavior is identical to before (module-level KEY_COUNT / CHUNK_SIZE).

    :param key_count: number of batches to yield; defaults to KEY_COUNT.
    :param chunk_size: values per batch; defaults to CHUNK_SIZE.
    :return: generator of lists of bytes like ``b'__test_17'``, with a
        globally increasing counter so every value across all batches is
        unique.
    """
    # None-sentinel defaults so the module constants are read at call
    # time, not at definition time.
    if key_count is None:
        key_count = KEY_COUNT
    if chunk_size is None:
        chunk_size = CHUNK_SIZE
    for i in range(key_count):
        start = i * chunk_size
        yield [("__test_%s" % v).encode('utf-8')
               for v in range(start, start + chunk_size)]
def bench(r, values):
    """Issue a SET immediately followed by a GET for each value.

    :param r: anything with redis-style ``set``/``get`` — a client, a
        pipeline, or a redpipe context.
    :param values: iterable of payloads; each also determines its key.
    :return: list of whatever ``r.get`` returned, one entry per value
        (real replies for a direct client, queue placeholders/futures for
        pipelined callers).
    """
    fetched = []
    for value in values:
        key = 'key%s' % value
        r.set(key, value)
        fetched.append(r.get(key))
    return fetched
def redispy_bench(redis_client):
    """Baseline: one network round trip per command (no pipelining)."""
    for chunk in values_iterator():
        # Direct client calls return real replies, so they must round-trip.
        assert bench(redis_client, chunk) == chunk
def redispipeline_bench(redis_client):
    """Raw redis-py pipeline: queue everything, flush once per batch."""
    for chunk in values_iterator():
        with redis_client.pipeline() as pipe:
            bench(pipe, chunk)
            # execute() returns [SET reply, GET reply, SET reply, ...];
            # the GET replies sit at the odd positions.
            replies = pipe.execute()
            assert replies[1::2] == chunk
def redpipe_bench():
    """Same workload through redpipe's auto-executing pipeline."""
    for chunk in values_iterator():
        with redpipe.autoexec() as conn:
            futures = bench(conn, chunk)
        # NOTE(review): the scrape lost indentation; the assert is placed
        # after the `with` since redpipe futures resolve on exec — confirm
        # against the upstream file.
        assert futures == chunk
def test_redispy(port, benchmark):
    """pytest-benchmark entry point for the unpipelined baseline."""
    client = build_redis(port)
    benchmark(redispy_bench, redis_client=client)
def test_pipeline(port, benchmark):
    """pytest-benchmark entry point for the raw redis-py pipeline."""
    client = build_redis(port)
    benchmark(redispipeline_bench, redis_client=client)
def test_redpipe(port, benchmark):
    """pytest-benchmark entry point for redpipe (uses its default connection)."""
    # build_redis registers the client with redpipe as a side effect;
    # redpipe_bench needs no explicit client argument.
    build_redis(port)
    benchmark(redpipe_bench)