Skip to content

Commit 61016b5

Browse files
committed
Add OMB & K6 experiment files
1 parent 14df34d commit 61016b5

10 files changed

Lines changed: 5362 additions & 0 deletions
File renamed without changes.
Lines changed: 76 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,76 @@
1+
import json
2+
import matplotlib.pyplot as plt
3+
import sys
4+
import os
5+
6+
def load_metrics(filename):
    """
    Parse the flat-array JSON result format emitted by OpenMessaging Benchmark.

    Returns a dict with the driver name, a synthetic 1-based time axis, and the
    throughput / p99 latency series, or None when the file does not exist.
    """
    if not os.path.exists(filename):
        # Bug fix: the original f-string had no placeholder, so the missing
        # path was never shown to the user.
        print(f"Error: File {filename} not found.")
        return None

    with open(filename, 'r') as f:
        data = json.load(f)

    # OMB reports one sample per stats interval; synthesize a 1-based axis
    # so the series can be plotted against "benchmark interval" numbers.
    count = len(data.get('publishRate', []))
    time_axis = list(range(1, count + 1))

    return {
        'driver': data.get('driver', 'Unknown'),
        'time': time_axis,
        'throughput': data.get('publishRate', []),
        'p99_pub_latency': data.get('publishLatency99pct', []),
        'p99_e2e_latency': data.get('endToEndLatency99pct', [])
    }
28+
29+
def plot_comparison(rabbit_file, kafka_file):
    """Render a two-panel RabbitMQ vs Kafka comparison and save it as a PNG."""
    rabbit = load_metrics(rabbit_file)
    kafka = load_metrics(kafka_file)
    if not (rabbit and kafka):
        return

    fig, (lat_ax, tp_ax) = plt.subplots(2, 1, figsize=(12, 10))

    # Panel 1: p99 publish latency per benchmark interval.
    lat_ax.plot(rabbit['time'], rabbit['p99_pub_latency'],
                label='RabbitMQ', marker='o', linestyle='-', color='orange')
    lat_ax.plot(kafka['time'], kafka['p99_pub_latency'],
                label='Kafka', marker='x', linestyle='--', color='blue')
    lat_ax.set_title('Hypothesis 1: P99 Publish Latency (Lower is Better)')
    lat_ax.set_ylabel('Latency (ms)')
    lat_ax.set_xlabel('Benchmark Interval')
    lat_ax.grid(True)
    lat_ax.legend()

    # Panel 2: publish throughput against the fixed target rate.
    tp_ax.plot(rabbit['time'], rabbit['throughput'],
               label='RabbitMQ', color='orange')
    tp_ax.plot(kafka['time'], kafka['throughput'],
               label='Kafka', color='blue')
    # Reference line at the configured workload rate (1000 msg/s).
    tp_ax.axhline(y=1000, color='r', linestyle=':', label='Target Rate (1000 msg/s)')
    tp_ax.set_title('Throughput Stability (Target: 1000 msg/s)')
    tp_ax.set_ylabel('Messages / Second')
    tp_ax.set_xlabel('Benchmark Interval')
    tp_ax.grid(True)
    tp_ax.legend()

    plt.tight_layout()
    output_file = 'benchmark_comparison_simple.png'
    plt.savefig(output_file)
    print(f"Successfully generated comparison graph: {output_file}")
70+
71+
if __name__ == "__main__":
    # Both result files are required positional arguments.
    args = sys.argv[1:]
    if len(args) < 2:
        print("Usage: python analyze_results_1000.py <rabbitmq_result.json> <kafka_result.json>")
        sys.exit(1)

    plot_comparison(args[0], args[1])
Lines changed: 75 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,75 @@
1+
import json
2+
import matplotlib.pyplot as plt
3+
import sys
4+
import os
5+
6+
def load_metrics(filename):
    """
    Parse the flat-array JSON result format emitted by OpenMessaging Benchmark.

    Returns a dict with the driver name, a synthetic 1-based time axis, and the
    throughput / p99 latency series, or None when the file does not exist.
    """
    if not os.path.exists(filename):
        # Bug fix: the original f-string had no placeholder, so the missing
        # path was never shown to the user.
        print(f"Error: File {filename} not found.")
        return None

    with open(filename, 'r') as f:
        data = json.load(f)

    # OMB reports one sample per stats interval; synthesize a 1-based axis
    # so the series can be plotted against "benchmark interval" numbers.
    count = len(data.get('publishRate', []))
    time_axis = list(range(1, count + 1))

    return {
        'driver': data.get('driver', 'Unknown'),
        'time': time_axis,
        'throughput': data.get('publishRate', []),
        'p99_pub_latency': data.get('publishLatency99pct', []),
        'p99_e2e_latency': data.get('endToEndLatency99pct', [])
    }
28+
29+
def plot_comparison(rabbit_file, kafka_file):
    """Render a two-panel RabbitMQ vs Kafka comparison and save it as a PNG."""
    rabbit = load_metrics(rabbit_file)
    kafka = load_metrics(kafka_file)
    if not (rabbit and kafka):
        return

    fig, (lat_ax, tp_ax) = plt.subplots(2, 1, figsize=(12, 10))

    # Panel 1: p99 publish latency per benchmark interval.
    lat_ax.plot(rabbit['time'], rabbit['p99_pub_latency'],
                label='RabbitMQ', marker='o', linestyle='-', color='orange')
    lat_ax.plot(kafka['time'], kafka['p99_pub_latency'],
                label='Kafka', marker='x', linestyle='--', color='blue')
    lat_ax.set_title('Hypothesis 1: P99 Publish Latency (Lower is Better)')
    lat_ax.set_ylabel('Latency (ms)')
    lat_ax.set_xlabel('Benchmark Interval')
    lat_ax.grid(True)
    lat_ax.legend()

    # Panel 2: publish throughput against the fixed stress-test target rate.
    tp_ax.plot(rabbit['time'], rabbit['throughput'],
               label='RabbitMQ', color='orange')
    tp_ax.plot(kafka['time'], kafka['throughput'],
               label='Kafka', color='blue')
    tp_ax.axhline(y=10000, color='r', linestyle=':', label='Target Rate (10000 msg/s)')
    tp_ax.set_title('Throughput Stability (Target: 10000 msg/s)')
    tp_ax.set_ylabel('Messages / Second')
    tp_ax.set_xlabel('Benchmark Interval')
    tp_ax.grid(True)
    tp_ax.legend()

    plt.tight_layout()
    output_file = 'benchmark_comparison_stress.png'
    plt.savefig(output_file)
    print(f"Successfully generated comparison graph: {output_file}")
69+
70+
if __name__ == "__main__":
    # Both result files are required positional arguments.
    args = sys.argv[1:]
    if len(args) < 2:
        print("Usage: python analyze_results_10000.py <rabbitmq_result.json> <kafka_result.json>")
        sys.exit(1)

    plot_comparison(args[0], args[1])
Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
1+
# OpenMessaging Benchmark driver definition for Kafka.
name: Kafka
driverClass: io.openmessaging.benchmark.driver.kafka.KafkaBenchmarkDriver

# Kafka Client configuration
# Single-broker test setup: no replication, so one in-sync replica suffices.
replicationFactor: 1
topicConfig: |
  min.insync.replicas=1

# Broker address as seen from the benchmark host — presumably a
# Docker-mapped port (29092); confirm against the deployment.
commonConfig: |
  bootstrap.servers=localhost:29092

# acks=1 acknowledges on the leader only; linger.ms / batch.size tune
# producer batching for throughput.
producerConfig: |
  acks=1
  linger.ms=1
  batch.size=131072

# Read topics from the beginning; offset commits are disabled so the
# benchmark controls consumption itself.
consumerConfig: |
  auto.offset.reset=earliest
  enable.auto.commit=false
Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
# OpenMessaging Benchmark driver definition for RabbitMQ.
name: RabbitMQ
driverClass: io.openmessaging.benchmark.driver.rabbitmq.RabbitMqBenchmarkDriver

# RabbitMQ Connection configuration
# Broker on localhost with the default AMQP port/credentials —
# NOTE(review): confirm against the deployment.
amqpUris: amqp://localhost

# Topology configuration
# Durable topic exchange so queues/messages survive broker restart,
# keeping persistence comparable between the two systems under test.
exchangeType: topic
exchangeName: benchmark-exchange
durable: true
persistentDelivery: true # Matches Kafka's disk persistence
messagePersistence: true

0 commit comments

Comments
 (0)