
Commit d26a3f5

[fix] consumer groups for the self-contained-coordinator should be broken out by running_platform; [fix] the SETS for testcases:build_variants, platforms, and testcases:metric_context_path are missing (#29)
1 parent 5cdebfd commit d26a3f5
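
Background for the first fix: within a single consumer group, Redis streams deliver each entry to only one consumer, so coordinators on different platforms would compete for the same build events. Splitting the group by running_platform gives every platform its own cursor over the stream, so each platform benchmarks every build. Below is a minimal sketch of that delivery behavior, assuming a local Redis (5.0 or later), the redis-py client, and illustrative stream/group names rather than the constants used in this repo.

# Sketch only: illustrative stream and group names, local Redis assumed.
import redis

conn = redis.Redis(port=6379)
stream = "example:new-build-events"  # stand-in for STREAM_KEYNAME_NEW_BUILD_EVENTS

# One consumer group per platform, each with its own last-delivered cursor.
for platform in ("platform-a", "platform-b"):
    try:
        conn.xgroup_create(stream, "runners-{}".format(platform), id="0", mkstream=True)
    except redis.exceptions.ResponseError:
        pass  # group already exists

conn.xadd(stream, {"git_hash": "abc123"})

# Both platforms receive the same entry because they read through different groups;
# with one shared group, only one of these reads would return it.
for platform in ("platform-a", "platform-b"):
    group = "runners-{}".format(platform)
    print(platform, conn.xreadgroup(group, "proc#1", {stream: ">"}, count=1))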

File tree

5 files changed: +62 -32 lines

poetry.lock

+1 -1

Generated file; diff not rendered by default.

pyproject.toml

+2 -2

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "redis-benchmarks-specification"
-version = "0.1.6"
+version = "0.1.7"
 description = "The Redis benchmarks specification describes the cross-language/tools requirements and expectations to foster performance and observability standards around redis related technologies. Members from both industry and academia, including organizations and individuals are encouraged to contribute."
 authors = ["filipecosta90 <[email protected]>"]
 readme = "Readme.md"
@@ -16,7 +16,7 @@ argparse = "^1.4.0"
 Flask-HTTPAuth = "^4.4.0"
 PyYAML = "^5.4.1"
 docker = "^4.4.4"
-redisbench-admin = "^0.4.14"
+redisbench-admin = "^0.4.15"
 psutil = "^5.8.0"
 tox-docker = "^3.0.0"

redis_benchmarks_specification/__self_contained_coordinator__/self_contained_coordinator.py

+18 -27

@@ -25,7 +25,6 @@
     get_final_benchmark_config,
 )
 from redisbench_admin.utils.local import get_local_run_full_filename
-from redisbench_admin.utils.remote import get_overall_dashboard_keynames
 from redisbench_admin.utils.results import post_process_benchmark_results
 
 from redis_benchmarks_specification.__common__.env import (
@@ -146,7 +145,8 @@ def main():
         exit(1)
 
     logging.info("checking build spec requirements")
-    build_runners_consumer_group_create(conn)
+    running_platform = args.platform_name
+    build_runners_consumer_group_create(conn, running_platform)
     stream_id = None
     docker_client = docker.from_env()
     home = str(Path.home())
@@ -166,31 +166,38 @@ def main():
            rts,
            testsuite_spec_files,
            topologies_map,
-            args.platform_name,
+            running_platform,
        )
 
 
-def build_runners_consumer_group_create(conn, id="$"):
+def build_runners_consumer_group_create(conn, running_platform, id="$"):
+    consumer_group_name = get_runners_consumer_group_name(running_platform)
+    logging.info("Will use consumer group named {}.".format(consumer_group_name))
     try:
         conn.xgroup_create(
             STREAM_KEYNAME_NEW_BUILD_EVENTS,
-            STREAM_GH_NEW_BUILD_RUNNERS_CG,
+            consumer_group_name,
             mkstream=True,
             id=id,
         )
         logging.info(
             "Created consumer group named {} to distribute work.".format(
-                STREAM_GH_NEW_BUILD_RUNNERS_CG
+                consumer_group_name
             )
         )
     except redis.exceptions.ResponseError:
         logging.info(
-            "Consumer group named {} already existed.".format(
-                STREAM_GH_NEW_BUILD_RUNNERS_CG
-            )
+            "Consumer group named {} already existed.".format(consumer_group_name)
         )
 
 
+def get_runners_consumer_group_name(running_platform):
+    consumer_group_name = "{}-{}".format(
+        STREAM_GH_NEW_BUILD_RUNNERS_CG, running_platform
+    )
+    return consumer_group_name
+
+
 def self_contained_coordinator_blocking_read(
     conn,
     datasink_push_results_redistimeseries,
@@ -205,10 +212,10 @@ def self_contained_coordinator_blocking_read(
     num_process_streams = 0
     overall_result = False
     consumer_name = "{}-self-contained-proc#{}".format(
-        STREAM_GH_NEW_BUILD_RUNNERS_CG, "1"
+        get_runners_consumer_group_name(platform_name), "1"
    )
    newTestInfo = conn.xreadgroup(
-        STREAM_GH_NEW_BUILD_RUNNERS_CG,
+        get_runners_consumer_group_name(platform_name),
        consumer_name,
        {STREAM_KEYNAME_NEW_BUILD_EVENTS: stream_id},
        count=1,
@@ -307,20 +314,6 @@ def process_self_contained_coordinator_stream(
        tf_github_org = "redis"
        tf_github_repo = "redis"
        tf_triggering_env = "ci"
-        (
-            prefix,
-            testcases_setname,
-            tsname_project_total_failures,
-            tsname_project_total_success,
-            running_platforms_setname,
-            testcases_build_variant_setname,
-        ) = get_overall_dashboard_keynames(
-            tf_github_org,
-            tf_github_repo,
-            tf_triggering_env,
-            build_variant_name,
-            running_platform,
-        )
 
        benchmark_tool = "redis-benchmark"
        for build_artifact in build_artifacts:
@@ -483,12 +476,10 @@ def process_self_contained_coordinator_stream(
                        rts,
                        start_time_ms,
                        test_name,
-                        testcases_setname,
                        git_branch,
                        tf_github_org,
                        tf_github_repo,
                        tf_triggering_env,
-                        tsname_project_total_success,
                        metadata,
                        build_variant_name,
                        running_platform,

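A short usage sketch of the two functions changed above, assuming the package is installed and a Redis instance is listening on the default local port; the platform string is illustrative (in the coordinator it comes from args.platform_name):

# Usage sketch: per-platform group name and idempotent group creation.
import redis

from redis_benchmarks_specification.__self_contained_coordinator__.self_contained_coordinator import (
    build_runners_consumer_group_create,
    get_runners_consumer_group_name,
)

conn = redis.Redis(port=6379)
running_platform = "example-platform"  # in main() this is args.platform_name

# The group name is the shared CG constant suffixed with the platform.
print(get_runners_consumer_group_name(running_platform))

# Safe to call repeatedly: the second call logs "already existed" instead of raising.
build_runners_consumer_group_create(conn, running_platform)
build_runners_consumer_group_create(conn, running_platform)
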
utils/tests/test_data/dump.rdb

0 Bytes
Binary file not shown.

utils/tests/test_self_contained_coordinator.py

+41 -2

@@ -6,6 +6,7 @@
 import yaml
 from pathlib import Path
 
+from redisbench_admin.utils.remote import get_overall_dashboard_keynames
 from redisbench_admin.utils.utils import get_ts_metric_name
 
 from redis_benchmarks_specification.__api__.schema import commit_schema_to_stream
@@ -82,13 +83,13 @@ def test_self_contained_coordinator_blocking_read():
 
         assert conn.exists(STREAM_KEYNAME_NEW_BUILD_EVENTS)
         assert conn.xlen(STREAM_KEYNAME_NEW_BUILD_EVENTS) > 0
+        running_platform = "fco-ThinkPad-T490"
 
-        build_runners_consumer_group_create(conn, "0")
+        build_runners_consumer_group_create(conn, running_platform, "0")
         rts = redistimeseries.client.Client(port=16379)
         docker_client = docker.from_env()
         home = str(Path.home())
         stream_id = ">"
-        running_platform = "fco-ThinkPad-T490"
         topologies_map = get_topologies(
             "./redis_benchmarks_specification/setups/topologies/topologies.yml"
         )
@@ -140,5 +141,43 @@ def test_self_contained_coordinator_blocking_read():
 
         assert ts_key_name.encode() in conn.keys()
 
+        (
+            prefix,
+            testcases_setname,
+            tsname_project_total_failures,
+            tsname_project_total_success,
+            running_platforms_setname,
+            build_variant_setname,
+            testcases_metric_context_path_setname,
+            testcases_and_metric_context_path_setname,
+        ) = get_overall_dashboard_keynames(
+            tf_github_org, tf_github_repo, tf_triggering_env, test_name
+        )
+        assert rts.redis.exists(testcases_setname)
+        assert rts.redis.exists(running_platforms_setname)
+        assert rts.redis.exists(build_variant_setname)
+        assert build_variant_name.encode() in rts.redis.smembers(
+            build_variant_setname
+        )
+        assert test_name.encode() in rts.redis.smembers(testcases_setname)
+        assert running_platform.encode() in rts.redis.smembers(
+            running_platforms_setname
+        )
+        testcases_and_metric_context_path_members = [
+            x.decode()
+            for x in rts.redis.smembers(testcases_and_metric_context_path_setname)
+        ]
+        metric_context_path_members = [
+            x.decode()
+            for x in rts.redis.smembers(testcases_metric_context_path_setname)
+        ]
+        assert len(testcases_and_metric_context_path_members) == len(
+            metric_context_path_members
+        )
+
+        assert [x.decode() for x in rts.redis.smembers(testcases_setname)] == [
+            test_name
+        ]
+
     except redis.exceptions.ConnectionError:
         pass

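The new assertions cover the second fix: after a benchmark run, the dashboard bookkeeping SETs (test cases, build variants, platforms, metric context paths) must exist and contain the run's identifiers. Below is a standalone illustration of that shape of check, using placeholder key names and members; in the test above the real names come from get_overall_dashboard_keynames and the members are written during the run.

# Illustration only: placeholder key names and members, Redis on port 16379 as in the test.
import redis

r = redis.Redis(port=16379)

testcases_setname = "ci.benchmarks.example:testcases"           # placeholder
running_platforms_setname = "ci.benchmarks.example:platforms"   # placeholder
build_variant_setname = "ci.benchmarks.example:build_variants"  # placeholder

# Simulate what a finished run is expected to leave behind.
r.sadd(testcases_setname, "example-testcase")
r.sadd(running_platforms_setname, "example-platform")
r.sadd(build_variant_setname, "example-build-variant")

# The same shape of assertions the new test performs.
assert r.exists(testcases_setname)
assert b"example-platform" in r.smembers(running_platforms_setname)
assert b"example-build-variant" in r.smembers(build_variant_setname)
assert [x.decode() for x in r.smembers(testcases_setname)] == ["example-testcase"]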