This repository was archived by the owner on Jul 26, 2022. It is now read-only.

handle pandas api change #5

Open · wants to merge 3 commits into base: master

15 changes: 12 additions & 3 deletions pandas_msgpack/packers.py
@@ -55,10 +55,19 @@
                     Index, MultiIndex, Float64Index, Int64Index,
                     Panel, RangeIndex, PeriodIndex, DatetimeIndex, NaT,
                     Categorical, CategoricalIndex)
-from pandas.sparse.api import SparseSeries, SparseDataFrame
-from pandas.sparse.array import BlockIndex, IntIndex
+try:
+    from pandas import SparseSeries, SparseDataFrame
+except ImportError:
+    from pandas.sparse.api import SparseSeries, SparseDataFrame
+try:
+    from pandas.core.sparse.array import BlockIndex, IntIndex
+except ImportError:
+    from pandas.sparse.array import BlockIndex, IntIndex
 from pandas.core.generic import NDFrame
-from pandas.core.common import PerformanceWarning
+try:
+    from pandas.errors import PerformanceWarning
+except ImportError:
+    from pandas.core.common import PerformanceWarning
 from pandas.io.common import get_filepath_or_buffer
 from pandas.core.internals import BlockManager, make_block, _safe_reshape
 import pandas.core.internals as internals
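
The three try/except blocks added to packers.py all follow the same pattern: try the newer pandas import location first and fall back to the older one. If more of these accumulate, they could be gathered in a single compatibility module; the sketch below is only an illustration (the module name _pandas_compat.py and its layout are assumptions, not part of this PR):

    # _pandas_compat.py -- hypothetical helper that gathers the
    # version-dependent imports so packers.py can import from one place.
    try:
        # current pandas exposes the sparse containers at the top level
        from pandas import SparseSeries, SparseDataFrame
    except ImportError:
        from pandas.sparse.api import SparseSeries, SparseDataFrame

    try:
        # the sparse index classes live under pandas.core.sparse in newer pandas
        from pandas.core.sparse.array import BlockIndex, IntIndex
    except ImportError:
        from pandas.sparse.array import BlockIndex, IntIndex

    try:
        # PerformanceWarning is importable from pandas.errors since pandas 0.20
        from pandas.errors import PerformanceWarning
    except ImportError:
        from pandas.core.common import PerformanceWarning

    __all__ = ['SparseSeries', 'SparseDataFrame', 'BlockIndex', 'IntIndex',
               'PerformanceWarning']

packers.py would then import these names from _pandas_compat and keep the version checks out of the main module.
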
55 changes: 30 additions & 25 deletions pandas_msgpack/tests/test_packers.py
@@ -14,7 +14,10 @@
 from pandas import (Series, DataFrame, Panel, MultiIndex, bdate_range,
                     date_range, period_range, Index, Categorical)
 from pandas.api.types import is_datetime64tz_dtype
-from pandas.core.common import PerformanceWarning
+try:
+    from pandas.errors import PerformanceWarning
+except ImportError:
+    from pandas.core.common import PerformanceWarning
 import pandas.util.testing as tm
 from pandas.util.testing import (ensure_clean,
                                  assert_categorical_equal,
@@ -92,13 +95,15 @@ def check_arbitrary(a, b):
         assert(a == b)
 
 
-class TestPackers(tm.TestCase):
+class TestPackers(object):
 
-    def setUp(self):
-        self.path = '__%s__.msg' % tm.rands(10)
+    @classmethod
+    def setup_class(cls):
+        cls.path = '__%s__.msg' % tm.rands(10)
 
-    def tearDown(self):
-        pass
+    @classmethod
+    def teardown_class(cls):
+        cls.path = None
 
     def encode_decode(self, x, compress=None, **kwargs):
         with ensure_clean(self.path) as p:
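
Under pytest, setup_class/teardown_class run once per class, whereas the unittest-style setUp/tearDown they replace ran around every test; that is sufficient here because cls.path is only a filename template and encode_decode wraps it in ensure_clean for each call. A minimal, self-contained illustration of the two hook styles pytest recognises (class and attribute names are made up for the example):

    class TestFixtureStyles(object):
        @classmethod
        def setup_class(cls):
            cls.shared = []            # built once for the whole class

        @classmethod
        def teardown_class(cls):
            cls.shared = None

        def setup_method(self, method):
            self.per_test = object()   # rebuilt before every test, like setUp

        def test_hooks(self):
            assert self.shared == []
            assert self.per_test is not None
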
@@ -165,7 +170,7 @@ def test_numpy_scalar_float(self):
     def test_numpy_scalar_complex(self):
         x = np.complex64(np.random.rand() + 1j * np.random.rand())
         x_rec = self.encode_decode(x)
-        self.assertTrue(np.allclose(x, x_rec))
+        assert np.allclose(x, x_rec)
 
     def test_scalar_float(self):
         x = np.random.rand()
@@ -175,7 +180,7 @@ def test_scalar_float(self):
     def test_scalar_complex(self):
         x = np.random.rand() + 1j * np.random.rand()
         x_rec = self.encode_decode(x)
-        self.assertTrue(np.allclose(x, x_rec))
+        assert np.allclose(x, x_rec)
 
     def test_list_numpy_float(self):
         x = [np.float32(np.random.rand()) for i in range(5)]
@@ -194,7 +199,7 @@ def test_list_numpy_float_complex(self):
             [np.complex128(np.random.rand() + 1j * np.random.rand())
              for i in range(5)]
         x_rec = self.encode_decode(x)
-        self.assertTrue(np.allclose(x, x_rec))
+        assert np.allclose(x, x_rec)
 
     def test_list_float(self):
         x = [np.random.rand() for i in range(5)]
@@ -209,7 +214,7 @@ def test_list_float_complex(self):
         x = [np.random.rand() for i in range(5)] + \
             [(np.random.rand() + 1j * np.random.rand()) for i in range(5)]
         x_rec = self.encode_decode(x)
-        self.assertTrue(np.allclose(x, x_rec))
+        assert np.allclose(x, x_rec)
 
     def test_dict_float(self):
         x = {'foo': 1.0, 'bar': 2.0}
@@ -249,8 +254,7 @@ def test_numpy_array_float(self):
     def test_numpy_array_complex(self):
         x = (np.random.rand(5) + 1j * np.random.rand(5)).astype(np.complex128)
         x_rec = self.encode_decode(x)
-        self.assertTrue(all(map(lambda x, y: x == y, x, x_rec)) and
-                        x.dtype == x_rec.dtype)
+        assert all(map(lambda x, y: x == y, x, x_rec)) and x.dtype == x_rec.dtype
 
     def test_list_mixed(self):
         x = [1.0, np.float32(3.5), np.complex128(4.25), u('foo')]
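
The self.assertTrue(...) to plain assert rewrites throughout this file lean on pytest's assertion rewriting, which prints the evaluated sub-expressions when an assert fails, so the unittest TestCase helpers are no longer needed. A toy illustration, not part of the PR:

    import numpy as np

    def test_roundtrip_allclose():
        x = [1.0, 2.0]
        x_rec = [1.0, 2.0]
        # if this ever failed, pytest would report the values of x and x_rec
        # alongside the failing expression, so no hand-built message is needed
        assert np.allclose(x, x_rec)
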
@@ -619,7 +623,7 @@ def _test_compression(self, compress):
             assert_frame_equal(value, expected)
             # make sure that we can write to the new frames
             for block in value._data.blocks:
-                self.assertTrue(block.values.flags.writeable)
+                assert block.values.flags.writeable
 
     def test_compression_zlib(self):
         if not _ZLIB_INSTALLED:
@@ -668,17 +672,18 @@ def decompress(ob):
                 # make sure that we can write to the new frames even though
                 # we needed to copy the data
                 for block in value._data.blocks:
-                    self.assertTrue(block.values.flags.writeable)
+                    assert block.values.flags.writeable
                     # mutate the data in some way
                     block.values[0] += rhs[block.dtype]
 
         for w in ws:
             # check the messages from our warnings
-            self.assertEqual(
-                str(w.message),
-                'copying data after decompressing; this may mean that'
-                ' decompress is caching its result',
-            )
+            # ws holds WarningMessage records, so check the warning category
+            if not issubclass(w.category, DeprecationWarning):
+                assert str(w.message) == (
+                    'copying data after decompressing; this may mean that'
+                    ' decompress is caching its result'
+                )
 
         for buf, control_buf in zip(not_garbage, control):
             # make sure none of our mutations above affected the
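
The records collected by tm.assert_produces_warning behave like those from warnings.catch_warnings(record=True): each item is a warnings.WarningMessage, so filtering by warning type has to look at w.category rather than at the record itself. A standalone sketch of that filtering pattern, independent of this test's fixtures:

    import warnings

    def messages_ignoring_deprecations():
        """Return captured warning texts, skipping any DeprecationWarning."""
        with warnings.catch_warnings(record=True) as ws:
            warnings.simplefilter('always')
            warnings.warn('old API', DeprecationWarning)
            warnings.warn('copying data after decompressing', UserWarning)
        return [str(w.message) for w in ws
                if not issubclass(w.category, DeprecationWarning)]

    assert messages_ignoring_deprecations() == ['copying data after decompressing']
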
@@ -701,14 +706,14 @@ def _test_small_strings_no_warn(self, compress):
             empty_unpacked = self.encode_decode(empty, compress=compress)
 
         tm.assert_numpy_array_equal(empty_unpacked, empty)
-        self.assertTrue(empty_unpacked.flags.writeable)
+        assert empty_unpacked.flags.writeable
 
         char = np.array([ord(b'a')], dtype='uint8')
         with tm.assert_produces_warning(None):
             char_unpacked = self.encode_decode(char, compress=compress)
 
         tm.assert_numpy_array_equal(char_unpacked, char)
-        self.assertTrue(char_unpacked.flags.writeable)
+        assert char_unpacked.flags.writeable
         # if this test fails I am sorry because the interpreter is now in a
         # bad state where b'a' points to 98 == ord(b'b').
         char_unpacked[0] = ord(b'b')
@@ -738,15 +743,15 @@ def test_readonly_axis_blosc(self):
             pytest.skip('no blosc')
         df1 = DataFrame({'A': list('abcd')})
         df2 = DataFrame(df1, index=[1., 2., 3., 4.])
-        self.assertTrue(1 in self.encode_decode(df1['A'], compress='blosc'))
-        self.assertTrue(1. in self.encode_decode(df2['A'], compress='blosc'))
+        assert 1 in self.encode_decode(df1['A'], compress='blosc')
+        assert 1. in self.encode_decode(df2['A'], compress='blosc')
 
     def test_readonly_axis_zlib(self):
         # GH11880
         df1 = DataFrame({'A': list('abcd')})
         df2 = DataFrame(df1, index=[1., 2., 3., 4.])
-        self.assertTrue(1 in self.encode_decode(df1['A'], compress='zlib'))
-        self.assertTrue(1. in self.encode_decode(df2['A'], compress='zlib'))
+        assert 1 in self.encode_decode(df1['A'], compress='zlib')
+        assert 1. in self.encode_decode(df2['A'], compress='zlib')
 
     def test_readonly_axis_blosc_to_sql(self):
         # GH11880