From fa27f5dbbbbb28f24ab6b159ea08db572945b38b Mon Sep 17 00:00:00 2001
From: cclauss
Date: Wed, 29 Aug 2018 20:59:19 +0200
Subject: [PATCH] Spanner benchmarks: print() is a function in Python 3 (#5862)

---
 bigquery/benchmark/benchmark.py |  4 +--
 spanner/benchmark/ycsb.py       | 46 ++++++++++++++++-----------------
 2 files changed, 25 insertions(+), 25 deletions(-)

diff --git a/bigquery/benchmark/benchmark.py b/bigquery/benchmark/benchmark.py
index 9c71cb943271..2917f169aba1 100644
--- a/bigquery/benchmark/benchmark.py
+++ b/bigquery/benchmark/benchmark.py
@@ -42,5 +42,5 @@
             raise Exception('found {0} columsn, expected {1}'.format(len(row), num_cols))
         num_rows += 1
     total_time = datetime.now() - start_time
-    print "query {0}: {1} rows, {2} cols, first byte {3} sec, total {4} sec"\
-        .format(query, num_rows, num_cols, first_byte_time.total_seconds(), total_time.total_seconds())
+    print("query {0}: {1} rows, {2} cols, first byte {3} sec, total {4} sec"
+          .format(query, num_rows, num_cols, first_byte_time.total_seconds(), total_time.total_seconds()))
diff --git a/spanner/benchmark/ycsb.py b/spanner/benchmark/ycsb.py
index e0ef718c0fba..bad4e0fe9271 100644
--- a/spanner/benchmark/ycsb.py
+++ b/spanner/benchmark/ycsb.py
@@ -147,38 +147,38 @@ def aggregate_metrics(latencies_ms, duration_ms, num_bucket):
                  latency in latencies_ms.iteritems()}
     overall_op_count = sum([op_count for op_count in op_counts.itervalues()])
 
-    print '[OVERALL], RunTime(ms), %f' % duration_ms
-    print '[OVERALL], Throughput(ops/sec), %f' % (float(overall_op_count) /
-                                                  duration_ms * 1000.0)
+    print('[OVERALL], RunTime(ms), %f' % duration_ms)
+    print('[OVERALL], Throughput(ops/sec), %f' % (float(overall_op_count) /
+                                                  duration_ms * 1000.0))
 
     for operation in op_counts.keys():
         operation_upper = operation.upper()
-        print '[%s], Operations, %d' % (operation_upper, op_counts[operation])
-        print '[%s], AverageLatency(us), %f' % (
-            operation_upper, numpy.average(latencies_ms[operation]) * 1000.0)
-        print '[%s], LatencyVariance(us), %f' % (
-            operation_upper, numpy.var(latencies_ms[operation]) * 1000.0)
-        print '[%s], MinLatency(us), %f' % (
-            operation_upper, min(latencies_ms[operation]) * 1000.0)
-        print '[%s], MaxLatency(us), %f' % (
-            operation_upper, max(latencies_ms[operation]) * 1000.0)
-        print '[%s], 95thPercentileLatency(us), %f' % (
+        print('[%s], Operations, %d' % (operation_upper, op_counts[operation]))
+        print('[%s], AverageLatency(us), %f' % (
+            operation_upper, numpy.average(latencies_ms[operation]) * 1000.0))
+        print('[%s], LatencyVariance(us), %f' % (
+            operation_upper, numpy.var(latencies_ms[operation]) * 1000.0))
+        print('[%s], MinLatency(us), %f' % (
+            operation_upper, min(latencies_ms[operation]) * 1000.0))
+        print('[%s], MaxLatency(us), %f' % (
+            operation_upper, max(latencies_ms[operation]) * 1000.0))
+        print('[%s], 95thPercentileLatency(us), %f' % (
             operation_upper,
-            numpy.percentile(latencies_ms[operation], 95.0) * 1000.0)
-        print '[%s], 99thPercentileLatency(us), %f' % (
+            numpy.percentile(latencies_ms[operation], 95.0) * 1000.0))
+        print('[%s], 99thPercentileLatency(us), %f' % (
             operation_upper,
-            numpy.percentile(latencies_ms[operation], 99.0) * 1000.0)
-        print '[%s], 99.9thPercentileLatency(us), %f' % (
+            numpy.percentile(latencies_ms[operation], 99.0) * 1000.0))
+        print('[%s], 99.9thPercentileLatency(us), %f' % (
             operation_upper,
-            numpy.percentile(latencies_ms[operation], 99.9) * 1000.0)
-        print '[%s], Return=OK, %d' % (operation_upper, op_counts[operation])
+            numpy.percentile(latencies_ms[operation], 99.9) * 1000.0))
+        print('[%s], Return=OK, %d' % (operation_upper, op_counts[operation]))
         latency_array = numpy.array(latencies_ms[operation])
         for j in range(num_bucket):
-            print '[%s], %d, %d' % (
+            print('[%s], %d, %d' % (
                 operation_upper, j,
-                ((j <= latency_array) & (latency_array < (j + 1))).sum())
-        print '[%s], >%d, %d' % (operation_upper, num_bucket,
-                                 (num_bucket <= latency_array).sum())
+                ((j <= latency_array) & (latency_array < (j + 1))).sum()))
+        print('[%s], >%d, %d' % (operation_upper, num_bucket,
+                                 (num_bucket <= latency_array).sum()))
 
 
 class WorkloadThread(threading.Thread):