Skip to content

Commit b83feec

Browse files
committed
Vendor six consistently
Use the vendored `six` everywhere, and `six.moves.range` rather than `xrange`
1 parent a6be21e commit b83feec

15 files changed

+34
-30
lines changed

benchmarks/consumer_performance.py

+3-1
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,8 @@
1010
import threading
1111
import traceback
1212

13+
from kafka.vendor.six.moves import range
14+
1315
from kafka import KafkaConsumer, KafkaProducer
1416
from test.fixtures import KafkaFixture, ZookeeperFixture
1517

@@ -64,7 +66,7 @@ def run(args):
6466
record = bytes(bytearray(args.record_size))
6567
producer = KafkaProducer(compression_type=args.fixture_compression,
6668
**props)
67-
for i in xrange(args.num_records):
69+
for i in range(args.num_records):
6870
producer.send(topic=args.topic, value=record)
6971
producer.flush()
7072
producer.close()

benchmarks/producer_performance.py

+3-1
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,8 @@
99
import threading
1010
import traceback
1111

12+
from kafka.vendor.six.moves import range
13+
1214
from kafka import KafkaProducer
1315
from test.fixtures import KafkaFixture, ZookeeperFixture
1416

@@ -77,7 +79,7 @@ def run(args):
7779
print('-> OK!')
7880
print()
7981

80-
for i in xrange(args.num_records):
82+
for i in range(args.num_records):
8183
producer.send(topic=args.topic, value=record)
8284
producer.flush()
8385

benchmarks/varint_speed.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
#!/usr/bin/env python
22
from __future__ import print_function
33
import perf
4-
import six
4+
from kafka.vendor import six
55

66

77
test_data = [

kafka/codec.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66
import struct
77

88
from kafka.vendor import six
9-
from kafka.vendor.six.moves import xrange # pylint: disable=import-error
9+
from kafka.vendor.six.moves import range
1010

1111
_XERIAL_V1_HEADER = (-126, b'S', b'N', b'A', b'P', b'P', b'Y', 0, 1, 1)
1212
_XERIAL_V1_FORMAT = 'bccccccBii'
@@ -150,7 +150,7 @@ def snappy_encode(payload, xerial_compatible=True, xerial_blocksize=32*1024):
150150
chunker = lambda payload, i, size: memoryview(payload)[i:size+i].tobytes()
151151

152152
for chunk in (chunker(payload, i, xerial_blocksize)
153-
for i in xrange(0, len(payload), xerial_blocksize)):
153+
for i in range(0, len(payload), xerial_blocksize)):
154154

155155
block = snappy.compress(chunk)
156156
block_size = len(block)

kafka/producer/simple.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44
import logging
55
import random
66

7-
from kafka.vendor.six.moves import xrange # pylint: disable=import-error
7+
from kafka.vendor.six.moves import range
88

99
from kafka.producer.base import Producer
1010

@@ -39,7 +39,7 @@ def _next_partition(self, topic):
3939
# Randomize the initial partition that is returned
4040
if self.random_start:
4141
num_partitions = len(self.client.get_partition_ids_for_topic(topic))
42-
for _ in xrange(random.randint(0, num_partitions-1)):
42+
for _ in range(random.randint(0, num_partitions-1)):
4343
next(self.partition_cycles[topic])
4444

4545
return next(self.partition_cycles[topic])

test/fixtures.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -12,8 +12,8 @@
1212
import uuid
1313

1414
import py
15-
from six.moves import urllib, xrange
16-
from six.moves.urllib.parse import urlparse # pylint: disable=E0611,F0401
15+
from kafka.vendor.six.moves import urllib, range
16+
from kafka.vendor.six.moves.urllib.parse import urlparse # pylint: disable=E0611,F0401
1717

1818
from kafka import errors, KafkaConsumer, KafkaProducer, SimpleClient
1919
from kafka.client_async import KafkaClient
@@ -24,7 +24,7 @@
2424
log = logging.getLogger(__name__)
2525

2626
def random_string(length):
27-
return "".join(random.choice(string.ascii_letters) for i in xrange(length))
27+
return "".join(random.choice(string.ascii_letters) for i in range(length))
2828

2929
def version_str_to_list(version_str):
3030
return tuple(map(int, version_str.split('.'))) # e.g., (0, 8, 1, 1)

test/test_client.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22

33
from mock import ANY, MagicMock, patch
44
from operator import itemgetter
5-
import six
5+
from kafka.vendor import six
66
from . import unittest
77

88
from kafka import SimpleClient

test/test_codec.py

+6-6
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44
import struct
55

66
import pytest
7-
from six.moves import xrange
7+
from kafka.vendor.six.moves import range
88

99
from kafka.codec import (
1010
has_snappy, has_gzip, has_lz4,
@@ -18,15 +18,15 @@
1818

1919

2020
def test_gzip():
21-
for i in xrange(1000):
21+
for i in range(1000):
2222
b1 = random_string(100).encode('utf-8')
2323
b2 = gzip_decode(gzip_encode(b1))
2424
assert b1 == b2
2525

2626

2727
@pytest.mark.skipif(not has_snappy(), reason="Snappy not available")
2828
def test_snappy():
29-
for i in xrange(1000):
29+
for i in range(1000):
3030
b1 = random_string(100).encode('utf-8')
3131
b2 = snappy_decode(snappy_encode(b1))
3232
assert b1 == b2
@@ -86,7 +86,7 @@ def test_snappy_encode_xerial():
8686
@pytest.mark.skipif(not has_lz4() or platform.python_implementation() == 'PyPy',
8787
reason="python-lz4 crashes on old versions of pypy")
8888
def test_lz4():
89-
for i in xrange(1000):
89+
for i in range(1000):
9090
b1 = random_string(100).encode('utf-8')
9191
b2 = lz4_decode(lz4_encode(b1))
9292
assert len(b1) == len(b2)
@@ -96,7 +96,7 @@ def test_lz4():
9696
@pytest.mark.skipif(not has_lz4() or platform.python_implementation() == 'PyPy',
9797
reason="python-lz4 crashes on old versions of pypy")
9898
def test_lz4_old():
99-
for i in xrange(1000):
99+
for i in range(1000):
100100
b1 = random_string(100).encode('utf-8')
101101
b2 = lz4_decode_old_kafka(lz4_encode_old_kafka(b1))
102102
assert len(b1) == len(b2)
@@ -106,7 +106,7 @@ def test_lz4_old():
106106
@pytest.mark.skipif(not has_lz4() or platform.python_implementation() == 'PyPy',
107107
reason="python-lz4 crashes on old versions of pypy")
108108
def test_lz4_incremental():
109-
for i in xrange(1000):
109+
for i in range(1000):
110110
# lz4 max single block size is 4MB
111111
# make sure we test with multiple-blocks
112112
b1 = random_string(100).encode('utf-8') * 50000

test/test_consumer_group.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44
import time
55

66
import pytest
7-
import six
7+
from kafka.vendor import six
88

99
from kafka import SimpleClient
1010
from kafka.conn import ConnectionStates

test/test_consumer_integration.py

+5-5
Original file line numberDiff line numberDiff line change
@@ -6,8 +6,8 @@
66
import kafka.codec
77

88
import pytest
9-
from six.moves import xrange
10-
import six
9+
from kafka.vendor.six.moves import range
10+
from kafka.vendor import six
1111

1212
from . import unittest
1313
from kafka import (
@@ -473,7 +473,7 @@ def test_offset_behavior__resuming_behavior(self):
473473
)
474474

475475
# Grab the first 195 messages
476-
output_msgs1 = [ consumer1.get_message().message.value for _ in xrange(195) ]
476+
output_msgs1 = [ consumer1.get_message().message.value for _ in range(195) ]
477477
self.assert_message_count(output_msgs1, 195)
478478

479479
# The total offset across both partitions should be at 180
@@ -603,7 +603,7 @@ def test_kafka_consumer__offset_commit_resume(self):
603603

604604
# Grab the first 180 messages
605605
output_msgs1 = []
606-
for _ in xrange(180):
606+
for _ in range(180):
607607
m = next(consumer1)
608608
output_msgs1.append(m)
609609
self.assert_message_count(output_msgs1, 180)
@@ -619,7 +619,7 @@ def test_kafka_consumer__offset_commit_resume(self):
619619

620620
# 181-200
621621
output_msgs2 = []
622-
for _ in xrange(20):
622+
for _ in range(20):
623623
m = next(consumer2)
624624
output_msgs2.append(m)
625625
self.assert_message_count(output_msgs2, 20)

test/test_producer_integration.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
import uuid
44

55
import pytest
6-
from six.moves import range
6+
from kafka.vendor.six.moves import range
77

88
from kafka import (
99
SimpleProducer, KeyedProducer,

test/test_producer_legacy.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616
from kafka.structs import (
1717
ProduceResponsePayload, RetryOptions, TopicPartition)
1818

19-
from six.moves import queue, xrange
19+
from kafka.vendor.six.moves import queue, range
2020

2121

2222
class TestKafkaProducer(unittest.TestCase):
@@ -84,7 +84,7 @@ def test_producer_async_queue_overfilled(self, mock):
8484
message_list = [message] * (queue_size + 1)
8585
producer.send_messages(topic, partition, *message_list)
8686
self.assertEqual(producer.queue.qsize(), queue_size)
87-
for _ in xrange(producer.queue.qsize()):
87+
for _ in range(producer.queue.qsize()):
8888
producer.queue.get()
8989

9090
def test_producer_sync_fail_on_error(self):
@@ -253,5 +253,5 @@ def send_side_effect(reqs, *args, **kwargs):
253253
self.assertEqual(self.client.send_produce_request.call_count, 5)
254254

255255
def tearDown(self):
256-
for _ in xrange(self.queue.qsize()):
256+
for _ in range(self.queue.qsize()):
257257
self.queue.get()

test/test_protocol.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
import struct
44

55
import pytest
6-
import six
6+
from kafka.vendor import six
77

88
from kafka.protocol.api import RequestHeader
99
from kafka.protocol.commit import GroupCoordinatorRequest

test/test_protocol_legacy.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
from contextlib import contextmanager
33
import struct
44

5-
import six
5+
from kafka.vendor import six
66
from mock import patch, sentinel
77
from . import unittest
88

test/test_util.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
# -*- coding: utf-8 -*-
22
import struct
33

4-
import six
4+
from kafka.vendor import six
55
from . import unittest
66

77
import kafka.errors

0 commit comments

Comments (0)