Remove dead code (#967)

Author: Jeff Widman
Date: 2017-02-28 10:18:02 -08:00
Committed by: Dana Powers
Parent: 432f00eb66
Commit: 2c23a27483

2 changed files with 0 additions and 78 deletions

kafka/util.py

@@ -4,7 +4,6 @@ import atexit
 import binascii
 import collections
 import struct
-import sys
 from threading import Thread, Event
 import weakref
@@ -33,19 +32,6 @@ def write_int_string(s):
         return struct.pack('>i%ds' % len(s), len(s), s)
 
 
-def write_short_string(s):
-    if s is not None and not isinstance(s, six.binary_type):
-        raise TypeError('Expected "%s" to be bytes\n'
-                        'data=%s' % (type(s), repr(s)))
-    if s is None:
-        return struct.pack('>h', -1)
-    elif len(s) > 32767 and sys.version_info < (2, 7):
-        # Python 2.6 issues a deprecation warning instead of a struct error
-        raise struct.error(len(s))
-    else:
-        return struct.pack('>h%ds' % len(s), len(s), s)
-
-
 def read_short_string(data, cur):
     if len(data) < cur + 2:
         raise BufferUnderflowError("Not enough data left")
@@ -62,24 +48,6 @@ def read_short_string(data, cur):
     return out, cur + strlen
 
 
-def read_int_string(data, cur):
-    if len(data) < cur + 4:
-        raise BufferUnderflowError(
-            "Not enough data left to read string len (%d < %d)" %
-            (len(data), cur + 4))
-
-    (strlen,) = struct.unpack('>i', data[cur:cur + 4])
-    if strlen == -1:
-        return None, cur + 4
-
-    cur += 4
-    if len(data) < cur + strlen:
-        raise BufferUnderflowError("Not enough data left")
-
-    out = data[cur:cur + strlen]
-    return out, cur + strlen
-
-
 def relative_unpack(fmt, data, cur):
     size = struct.calcsize(fmt)
     if len(data) < cur + size:
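
read_int_string, removed here, was the reader for int32 length-prefixed bytes: unpack a big-endian four-byte length at the current offset, treat -1 as null, and otherwise return the next strlen bytes together with the advanced offset. A condensed sketch of that logic with the bounds checks left out (the unpack_int_string name is illustrative, not part of kafka.util); expected results are taken from the deleted test_read_int_string:

    import struct

    def unpack_int_string(data, cur=0):
        # Big-endian int32 length prefix; a length of -1 means a null value.
        (strlen,) = struct.unpack('>i', data[cur:cur + 4])
        cur += 4
        if strlen == -1:
            return None, cur
        # Return the payload and the offset just past it.
        return data[cur:cur + strlen], cur + strlen

    assert unpack_int_string(b'\xff\xff\xff\xff') == (None, 4)
    assert unpack_int_string(b'\x00\x00\x00\x00') == (b'', 4)
    assert unpack_int_string(b'\x00\x00\x00\x0bsome string') == (b'some string', 15)
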

test/test_util.py

@@ -43,57 +43,11 @@ class UtilTest(unittest.TestCase):
             b'\xff\xff\xff\xff'
         )
 
-    def test_read_int_string(self):
-        self.assertEqual(kafka.util.read_int_string(b'\xff\xff\xff\xff', 0), (None, 4))
-        self.assertEqual(kafka.util.read_int_string(b'\x00\x00\x00\x00', 0), (b'', 4))
-        self.assertEqual(kafka.util.read_int_string(b'\x00\x00\x00\x0bsome string', 0), (b'some string', 15))
-
-    def test_read_int_string__insufficient_data(self):
-        with self.assertRaises(kafka.errors.BufferUnderflowError):
-            kafka.util.read_int_string(b'\x00\x00\x00\x021', 0)
-
-    def test_write_short_string(self):
-        self.assertEqual(
-            kafka.util.write_short_string(b'some string'),
-            b'\x00\x0bsome string'
-        )
-
-    def test_write_short_string__unicode(self):
-        with self.assertRaises(TypeError) as cm:
-            kafka.util.write_short_string(u'hello')
-        #: :type: TypeError
-        te = cm.exception
-        if six.PY2:
-            self.assertIn('unicode', str(te))
-        else:
-            self.assertIn('str', str(te))
-        self.assertIn('to be bytes', str(te))
-
-    def test_write_short_string__empty(self):
-        self.assertEqual(
-            kafka.util.write_short_string(b''),
-            b'\x00\x00'
-        )
-
-    def test_write_short_string__null(self):
-        self.assertEqual(
-            kafka.util.write_short_string(None),
-            b'\xff\xff'
-        )
-
-    def test_write_short_string__too_long(self):
-        with self.assertRaises(struct.error):
-            kafka.util.write_short_string(b' ' * 33000)
-
-    def test_read_short_string(self):
-        self.assertEqual(kafka.util.read_short_string(b'\xff\xff', 0), (None, 2))
-        self.assertEqual(kafka.util.read_short_string(b'\x00\x00', 0), (b'', 2))
-        self.assertEqual(kafka.util.read_short_string(b'\x00\x0bsome string', 0), (b'some string', 13))
-
-    def test_read_int_string__insufficient_data2(self):
-        with self.assertRaises(kafka.errors.BufferUnderflowError):
-            kafka.util.read_int_string('\x00\x021', 0)
-
     def test_relative_unpack2(self):
         self.assertEqual(
             kafka.util.relative_unpack('>hh', b'\x00\x01\x00\x00\x02', 0),
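
One detail worth noting: test_read_short_string is deleted here even though read_short_string itself survives the commit (it appears as unchanged context in the kafka/util.py hunk above). If equivalent coverage is wanted later, the deleted expectations still hold and can be checked directly against the surviving helper; a quick sketch, assuming this revision of kafka-python is importable:

    import kafka.util

    # Expected values copied from the deleted test_read_short_string above.
    assert kafka.util.read_short_string(b'\xff\xff', 0) == (None, 2)
    assert kafka.util.read_short_string(b'\x00\x00', 0) == (b'', 2)
    assert kafka.util.read_short_string(b'\x00\x0bsome string', 0) == (b'some string', 13)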