
Commit 4f0bc9d

Add semaphore block for ducktape tests (#2037)
* Add semaphore block for ducktape tests
* Increase kafka start timeout (three iterations)
* Add logs to debug pipeline
* Start kafka in KRaft mode
* Fix directory failures (three iterations)
* Templatise path
* Fix ducktape run
* Fix kafka broker listener
* Fix ducktape version error
* Cleanup
* Fix: bound violations should fail tests
* Expand bounds for success
* Add schema registry instance
* Update Schema Registry hostname (three iterations)
* Fix for linux CI environment
* Address minor feedback
* Fix semaphore
* Minor fix after rebase
1 parent 9b68959 commit 4f0bc9d

File tree

7 files changed, +298 -44 lines


.semaphore/semaphore.yml

Lines changed: 100 additions & 0 deletions
```diff
@@ -275,6 +275,106 @@ blocks:
             - python3 -m venv _venv && source _venv/bin/activate
             - chmod u+r+x tools/source-package-verification.sh
             - tools/source-package-verification.sh
+  - name: "Ducktape Performance Tests (Linux x64)"
+    dependencies: []
+    task:
+      agent:
+        machine:
+          type: s1-prod-ubuntu24-04-amd64-3
+      env_vars:
+        - name: OS_NAME
+          value: linux
+        - name: ARCH
+          value: x64
+        - name: BENCHMARK_BOUNDS_CONFIG
+          value: tests/ducktape/benchmark_bounds.json
+        - name: BENCHMARK_ENVIRONMENT
+          value: ci
+      prologue:
+        commands:
+          - '[[ -z $DOCKERHUB_APIKEY ]] || docker login --username $DOCKERHUB_USER --password $DOCKERHUB_APIKEY'
+      jobs:
+        - name: Build and Tests
+          commands:
+            # Setup Python environment
+            - sem-version python 3.9
+            - python3 -m venv _venv && source _venv/bin/activate
+
+            # Install ducktape framework and additional dependencies
+            - pip install ducktape psutil
+
+            # Install existing test requirements
+            - pip install -r requirements/requirements-tests.txt
+
+            # Build and install confluent-kafka from source
+            - lib_dir=dest/runtimes/$OS_NAME-$ARCH/native
+            - tools/wheels/install-librdkafka.sh "${LIBRDKAFKA_VERSION#v}" dest
+            - export CFLAGS="$CFLAGS -I${PWD}/dest/build/native/include"
+            - export LDFLAGS="$LDFLAGS -L${PWD}/${lib_dir}"
+            - export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$PWD/$lib_dir"
+            - python3 -m pip install -e .
+
+            # Store project root for reliable navigation
+            - PROJECT_ROOT="${PWD}"
+
+            # Start Kafka cluster and Schema Registry using dedicated ducktape compose file (KRaft mode)
+            - cd "${PROJECT_ROOT}/tests/docker"
+            - docker-compose -f docker-compose.ducktape.yml up -d kafka schema-registry
+
+            # Debug: Check container status and logs
+            - echo "=== Container Status ==="
+            - docker-compose -f docker-compose.ducktape.yml ps
+            - echo "=== Kafka Logs ==="
+            - docker-compose -f docker-compose.ducktape.yml logs kafka | tail -50
+
+            # Wait for Kafka to be ready (using PLAINTEXT listener for external access)
+            - |
+              timeout 1800 bash -c '
+                counter=0
+                until docker-compose -f docker-compose.ducktape.yml exec -T kafka kafka-topics --bootstrap-server localhost:9092 --list >/dev/null 2>&1; do
+                  echo "Waiting for Kafka... (attempt $((counter+1)))"
+
+                  # Show logs every 4th attempt (every 20 seconds)
+                  if [ $((counter % 4)) -eq 0 ] && [ $counter -gt 0 ]; then
+                    echo "=== Recent Kafka Logs ==="
+                    docker-compose -f docker-compose.ducktape.yml logs --tail=10 kafka
+                    echo "=== Container Status ==="
+                    docker-compose -f docker-compose.ducktape.yml ps kafka
+                  fi
+
+                  counter=$((counter+1))
+                  sleep 5
+                done
+              '
+            - echo "Kafka cluster is ready!"
+
+            # Wait for Schema Registry to be ready
+            - echo "=== Waiting for Schema Registry ==="
+            - |
+              timeout 300 bash -c '
+                counter=0
+                until curl -f http://localhost:8081/subjects >/dev/null 2>&1; do
+                  echo "Waiting for Schema Registry... (attempt $((counter+1)))"
+
+                  # Show logs every 3rd attempt (every 15 seconds)
+                  if [ $((counter % 3)) -eq 0 ] && [ $counter -gt 0 ]; then
+                    echo "=== Recent Schema Registry Logs ==="
+                    docker-compose -f docker-compose.ducktape.yml logs --tail=10 schema-registry
+                    echo "=== Schema Registry Container Status ==="
+                    docker-compose -f docker-compose.ducktape.yml ps schema-registry
+                  fi
+
+                  counter=$((counter+1))
+                  sleep 5
+                done
+              '
+            - echo "Schema Registry is ready!"
+
+            # Run standard ducktape tests with CI bounds
+            - cd "${PROJECT_ROOT}" && PYTHONPATH="${PROJECT_ROOT}" python tests/ducktape/run_ducktape_test.py
+
+            # Cleanup
+            - cd "${PROJECT_ROOT}/tests/docker" && docker-compose -f docker-compose.ducktape.yml down -v || true
   - name: "Packaging"
     run:
       when: "tag =~ '.*'"
```
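For local debugging of this block, the two shell wait loops above can be reproduced from Python. A minimal sketch, assuming `confluent-kafka` is installed and the ducktape compose stack is running; the 120 s/60 s timeouts are illustrative stand-ins for the pipeline's 1800 s/300 s:

```python
# Sketch of the CI readiness checks, for local debugging (not committed code).
import time
import urllib.request

from confluent_kafka.admin import AdminClient


def wait_for_kafka(bootstrap="localhost:9092", timeout_s=120):
    """Poll broker metadata until it answers, mirroring the kafka-topics loop."""
    admin = AdminClient({"bootstrap.servers": bootstrap})
    deadline = time.monotonic() + timeout_s
    while time.monotonic() < deadline:
        try:
            admin.list_topics(timeout=5)  # raises until a broker responds
            return True
        except Exception:
            time.sleep(5)
    return False


def wait_for_schema_registry(url="http://localhost:8081/subjects", timeout_s=60):
    """Poll the /subjects endpoint, mirroring the curl loop."""
    deadline = time.monotonic() + timeout_s
    while time.monotonic() < deadline:
        try:
            with urllib.request.urlopen(url, timeout=5):
                return True
        except Exception:
            time.sleep(5)
    return False


if __name__ == "__main__":
    print("kafka ready:", wait_for_kafka())
    print("schema registry ready:", wait_for_schema_registry())
```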
tests/docker/docker-compose.ducktape.yml

Lines changed: 36 additions & 0 deletions

```diff
@@ -0,0 +1,36 @@
+services:
+  kafka:
+    image: confluentinc/cp-kafka:latest
+    container_name: kafka-ducktape
+    ports:
+      - "9092:9092"
+      - "29092:29092"
+    environment:
+      KAFKA_NODE_ID: 1
+      KAFKA_PROCESS_ROLES: broker,controller
+      KAFKA_CONTROLLER_QUORUM_VOTERS: 1@kafka:9093
+      KAFKA_CONTROLLER_LISTENER_NAMES: CONTROLLER
+      KAFKA_LISTENERS: PLAINTEXT://0.0.0.0:9092,CONTROLLER://0.0.0.0:9093,PLAINTEXT_HOST://0.0.0.0:29092
+      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://localhost:9092,PLAINTEXT_HOST://dockerhost:29092
+      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
+      KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
+      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+      KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
+      KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+      CLUSTER_ID: 4L6g3nShT-eMCtK--X86sw
+
+  schema-registry:
+    image: confluentinc/cp-schema-registry:latest
+    container_name: schema-registry-ducktape
+    depends_on:
+      - kafka
+    ports:
+      - "8081:8081"
+    extra_hosts:
+      - "dockerhost:172.17.0.1"
+    environment:
+      SCHEMA_REGISTRY_HOST_NAME: schema-registry
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: dockerhost:29092
+      SCHEMA_REGISTRY_LISTENERS: http://0.0.0.0:8081
+      SCHEMA_REGISTRY_KAFKASTORE_TOPIC_REPLICATION_FACTOR: 1
+      SCHEMA_REGISTRY_DEBUG: 'true'
```
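Note the split listener layout: host-side clients reach the broker at `localhost:9092`, while the Schema Registry container reaches it as `dockerhost:29092` via the `extra_hosts` mapping to the Docker bridge gateway. A quick host-side smoke test, sketched assuming `confluent-kafka` is installed, both containers are healthy, and broker topic auto-creation is enabled (the topic name is arbitrary):

```python
# Host-side smoke test for the ducktape compose stack (a sketch, not committed code).
import json
import urllib.request

from confluent_kafka import Producer

# Produce one message through the host-facing PLAINTEXT listener.
producer = Producer({"bootstrap.servers": "localhost:9092"})
producer.produce("smoke-test", value=b"hello")  # assumes topic auto-creation is enabled
producer.flush(10)
print("produced one message via localhost:9092")

# Schema Registry answers on 8081; it reaches Kafka itself via dockerhost:29092.
with urllib.request.urlopen("http://localhost:8081/subjects", timeout=5) as resp:
    print("registered subjects:", json.loads(resp.read()))
```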

tests/ducktape/README.md

Lines changed: 46 additions & 14 deletions
````diff
@@ -5,7 +5,8 @@ Ducktape-based producer tests for the Confluent Kafka Python client with compreh
 ## Prerequisites
 
 - `pip install ducktape confluent-kafka psutil`
-- Kafka running on `localhost:9092`
+- Kafka running on `localhost:9092` (PLAINTEXT listener - ducktape tests use the simple port)
+- Schema Registry running on `localhost:8081` (uses `host.docker.internal:29092` for Kafka connection)
 
 ## Running Tests
 
@@ -40,29 +41,60 @@ Every test automatically includes:
 
 ## Configuration
 
-Performance bounds are loaded from a JSON config file. By default, it loads `benchmark_bounds.json`, but you can override this with the `BENCHMARK_BOUNDS_CONFIG` environment variable:
+Performance bounds are loaded from an environment-based JSON config file. By default, it loads `benchmark_bounds.json`, but you can override this with the `BENCHMARK_BOUNDS_CONFIG` environment variable.
+
+### Environment-Based Configuration
+
+The bounds configuration supports different environments with different performance thresholds:
 
 ```json
 {
-  "min_throughput_msg_per_sec": 1500.0,
-  "max_p95_latency_ms": 1500.0,
-  "max_error_rate": 0.01,
-  "min_success_rate": 0.99,
-  "max_p99_latency_ms": 2500.0,
-  "max_memory_growth_mb": 600.0,
-  "max_buffer_full_rate": 0.03,
-  "min_messages_per_poll": 15.0
+  "_comment": "Performance bounds for benchmark tests by environment",
+  "local": {
+    "_comment": "Default bounds for local development - more relaxed thresholds",
+    "min_throughput_msg_per_sec": 1000.0,
+    "max_p95_latency_ms": 2000.0,
+    "max_error_rate": 0.02,
+    "min_success_rate": 0.98,
+    "max_p99_latency_ms": 3000.0,
+    "max_memory_growth_mb": 800.0,
+    "max_buffer_full_rate": 0.05,
+    "min_messages_per_poll": 10.0
+  },
+  "ci": {
+    "_comment": "Stricter bounds for CI environment - production-like requirements",
+    "min_throughput_msg_per_sec": 1500.0,
+    "max_p95_latency_ms": 1500.0,
+    "max_error_rate": 0.01,
+    "min_success_rate": 0.99,
+    "max_p99_latency_ms": 2500.0,
+    "max_memory_growth_mb": 600.0,
+    "max_buffer_full_rate": 0.03,
+    "min_messages_per_poll": 15.0
+  },
+  "_default_environment": "local"
 }
 ```
 
+### Environment Selection
+
+- **BENCHMARK_ENVIRONMENT**: Selects which environment bounds to use (`local`, `ci`, etc.)
+- **Default**: Uses "local" environment if not specified
+- **CI**: Automatically uses "ci" environment in CI pipelines
+
 Usage:
 ```bash
-# Use default config file
+# Use default environment (local)
 ./run_ducktape_test.py
 
-# Use different configs for different environments
-BENCHMARK_BOUNDS_CONFIG=ci_bounds.json ./run_ducktape_test.py
-BENCHMARK_BOUNDS_CONFIG=production_bounds.json ./run_ducktape_test.py
+# Explicitly use local environment
+BENCHMARK_ENVIRONMENT=local ./run_ducktape_test.py
+
+# Use CI environment with stricter bounds
+BENCHMARK_ENVIRONMENT=ci ./run_ducktape_test.py
+
+# Use different config file entirely
+BENCHMARK_BOUNDS_CONFIG=custom_bounds.json ./run_ducktape_test.py
 ```
 
 ```python
````
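The selection order the README documents is: `BENCHMARK_ENVIRONMENT` if set, else the file's `_default_environment`, else `local`. A standalone sketch of that lookup (the committed implementation lives in `tests/ducktape/benchmark_metrics.py`; `load_bounds` here is a hypothetical helper):

```python
# Standalone sketch of the environment resolution described above.
import json
import os


def load_bounds(path="benchmark_bounds.json"):
    """Resolve the active environment, then return only its numeric bounds."""
    with open(path) as f:
        config = json.load(f)
    env = os.getenv("BENCHMARK_ENVIRONMENT",
                    config.get("_default_environment", "local"))
    if env not in config:
        available = [k for k in config if not k.startswith("_")]
        raise ValueError(f"Environment '{env}' not found; available: {available}")
    # Drop "_comment"-style keys, keeping only the thresholds.
    return {k: v for k, v in config[env].items() if not k.startswith("_")}


print(load_bounds())  # honors BENCHMARK_ENVIRONMENT, defaulting to "local"
```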
tests/ducktape/benchmark_bounds.json

Lines changed: 24 additions & 9 deletions

```diff
@@ -1,11 +1,26 @@
 {
-  "_comment": "Default performance bounds for benchmark tests",
-  "min_throughput_msg_per_sec": 1500.0,
-  "max_p95_latency_ms": 1500.0,
-  "max_error_rate": 0.01,
-  "min_success_rate": 0.99,
-  "max_p99_latency_ms": 2500.0,
-  "max_memory_growth_mb": 600.0,
-  "max_buffer_full_rate": 0.03,
-  "min_messages_per_poll": 15.0
+  "_comment": "Performance bounds for benchmark tests by environment",
+  "local": {
+    "_comment": "Default bounds for local development - more relaxed thresholds",
+    "min_throughput_msg_per_sec": 1000.0,
+    "max_p95_latency_ms": 2000.0,
+    "max_error_rate": 0.02,
+    "min_success_rate": 0.98,
+    "max_p99_latency_ms": 3000.0,
+    "max_memory_growth_mb": 800.0,
+    "max_buffer_full_rate": 0.05,
+    "min_messages_per_poll": 10.0
+  },
+  "ci": {
+    "_comment": "Stricter bounds for CI environment - production-like requirements",
+    "min_throughput_msg_per_sec": 1500.0,
+    "max_p95_latency_ms": 1500.0,
+    "max_error_rate": 0.01,
+    "min_success_rate": 0.99,
+    "max_p99_latency_ms": 2500.0,
+    "max_memory_growth_mb": 600.0,
+    "max_buffer_full_rate": 0.03,
+    "min_messages_per_poll": 10.0
+  },
+  "_default_environment": "local"
 }
```
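Since `local` and `ci` are meant to be drop-in alternatives, one cheap guard (not part of this commit) is asserting that every environment block defines the same set of bound keys; a sketch, assuming the repo root as working directory:

```python
# Sanity check (a sketch): every environment block should define the same
# bound keys, so switching BENCHMARK_ENVIRONMENT never silently drops one.
import json

with open("tests/ducktape/benchmark_bounds.json") as f:
    config = json.load(f)

envs = {k: v for k, v in config.items() if not k.startswith("_")}
key_sets = {env: {k for k in bounds if not k.startswith("_")}
            for env, bounds in envs.items()}

reference = next(iter(key_sets.values()))
for env, keys in key_sets.items():
    assert keys == reference, f"{env} differs: {sorted(keys ^ reference)}"
print(f"all {len(envs)} environments define the same {len(reference)} bounds")
```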

tests/ducktape/benchmark_metrics.py

Lines changed: 14 additions & 2 deletions
```diff
@@ -265,8 +265,20 @@ def _load_from_config_file(self, config_path: str):
         with open(config_path, 'r') as f:
             config = json.load(f)
 
-        # Set values from config file
-        for key, value in config.items():
+        # Always use environment-based format
+        environment = os.getenv('BENCHMARK_ENVIRONMENT',
+                                config.get('_default_environment', 'local'))
+        if environment not in config:
+            available_envs = [k for k in config.keys() if not k.startswith('_')]
+            raise ValueError(
+                f"Environment '{environment}' not found in config. "
+                f"Available environments: {available_envs}"
+            )
+        bounds_config = config[environment]
+        print(f"Loading benchmark bounds for environment: {environment}")
+
+        # Set values from the selected configuration
+        for key, value in bounds_config.items():
             if not key.startswith('_'):  # Skip comment fields like "_comment"
                 setattr(self, key, value)
 
```
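With this change an unknown environment raises instead of silently falling back. A sketch of that contract with the config inlined, since this hunk does not show the loader class around it:

```python
# Sketch of the lookup contract added above, config inlined for brevity
# (the committed code reads it from BENCHMARK_BOUNDS_CONFIG via json.load).
import os

config = {"local": {"min_throughput_msg_per_sec": 1000.0},
          "ci": {"min_throughput_msg_per_sec": 1500.0},
          "_default_environment": "local"}

for requested in ("ci", "staging"):  # "staging" is not defined in the config
    os.environ["BENCHMARK_ENVIRONMENT"] = requested
    environment = os.getenv("BENCHMARK_ENVIRONMENT",
                            config.get("_default_environment", "local"))
    try:
        if environment not in config:
            available = [k for k in config if not k.startswith("_")]
            raise ValueError(f"Environment '{environment}' not found in config. "
                             f"Available environments: {available}")
        print(environment, "->", config[environment])
    except ValueError as e:
        print("rejected:", e)
```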
tests/ducktape/run_ducktape_test.py

Lines changed: 69 additions & 14 deletions
```diff
@@ -31,26 +31,25 @@ def get_test_info(test_type):
     return test_info.get(test_type)
 
 
-def main():
-    """Run the ducktape test based on specified type"""
-    parser = argparse.ArgumentParser(description="Confluent Kafka Python - Ducktape Test Runner")
-    parser.add_argument('test_type', choices=['producer', 'consumer', 'producer_sr'],
-                        help='Type of test to run')
-    parser.add_argument('test_method', nargs='?',
-                        help='Specific test method to run (optional)')
-    parser.add_argument('--debug', action='store_true',
-                        help='Enable debug output')
-
-    args = parser.parse_args()
-
+def run_single_test_type(args):
+    """Run a single test type"""
     test_info = get_test_info(args.test_type)
 
     # Header
     print(f"Confluent Kafka Python - {test_info['description']}")
     print(f"Timestamp: {datetime.now().isoformat()}")
     print("=" * 70)
 
-    print(f"Using ducktape version: {ducktape.__version__}")
+    try:
+        print(f"Using ducktape version: {ducktape.__version__}")
+    except AttributeError:
+        # Some ducktape versions don't have __version__, try alternative methods
+        try:
+            import pkg_resources
+            version = pkg_resources.get_distribution('ducktape').version
+            print(f"Using ducktape version: {version}")
+        except Exception:
+            print("Using ducktape version: unknown")
 
     # Check if confluent_kafka is available
     try:
@@ -128,5 +127,61 @@ def main():
     return 1
 
 
+def run_all_tests(args):
+    """Run all available test types"""
+    test_types = ['producer', 'consumer', 'producer_sr']
+    overall_success = True
+
+    print("Confluent Kafka Python - All Ducktape Tests")
+    print(f"Timestamp: {datetime.now().isoformat()}")
+    print("=" * 70)
+
+    for test_type in test_types:
+        print(f"\n{'='*20} Running {test_type.upper()} Tests {'='*20}")
+
+        # Create a new args object for this test type
+        test_args = argparse.Namespace(
+            test_type=test_type,
+            test_method=args.test_method,
+            debug=args.debug
+        )
+
+        # Run the specific test type
+        result = run_single_test_type(test_args)
+        if result != 0:
+            overall_success = False
+            print(f"\n{test_type.upper()} tests failed!")
+        else:
+            print(f"\n{test_type.upper()} tests passed!")
+
+    print(f"\n{'='*70}")
+    if overall_success:
+        print("🎉 All tests completed successfully!")
+        return 0
+    else:
+        print("💥 Some tests failed. Check the output above for details.")
+        return 1
+
+
+def main():
+    """Run the ducktape test based on specified type"""
+    parser = argparse.ArgumentParser(description="Confluent Kafka Python - Ducktape Test Runner")
+    parser.add_argument('test_type', nargs='?', choices=['producer', 'consumer', 'producer_sr'],
+                        help='Type of test to run (default: run all tests)')
+    parser.add_argument('test_method', nargs='?',
+                        help='Specific test method to run (optional)')
+    parser.add_argument('--debug', action='store_true',
+                        help='Enable debug output')
+
+    args = parser.parse_args()
+
+    # If no test_type provided, run all tests
+    if args.test_type is None:
+        return run_all_tests(args)
+
+    # Run single test type
+    return run_single_test_type(args)
+
+
 if __name__ == "__main__":
-    sys.exit(main())
+    sys.exit(main())
```
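As a possible follow-up (not in this commit): `pkg_resources` is deprecated in recent setuptools, and on Python 3.8+ the standard library's `importlib.metadata` answers the same question, so the version fallback could be written as:

```python
# Hypothetical alternative to the pkg_resources fallback above;
# importlib.metadata ships with Python 3.8+ and avoids the deprecated API.
from importlib.metadata import PackageNotFoundError, version

try:
    print(f"Using ducktape version: {version('ducktape')}")
except PackageNotFoundError:
    print("Using ducktape version: unknown")
```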
