Skip to content
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.

Commit fdd19a5

Browse files
sinhrks authored and jreback committed May 31, 2017
CLN/BUG: fix ndarray assignment may cause unexpected cast
supersedes #14145 closes #14001
1 parent 4ca29f4 commit fdd19a5

File tree

11 files changed

+309
-77
lines changed

11 files changed

+309
-77
lines changed
 

‎doc/source/whatsnew/v0.21.0.txt

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -50,6 +50,7 @@ Backwards incompatible API changes
5050
- Accessing a non-existent attribute on a closed :class:`HDFStore` will now
5151
raise an ``AttributeError`` rather than a ``ClosedFileError`` (:issue:`16301`)
5252

53+
5354
.. _whatsnew_0210.api:
5455

5556
Other API Changes
@@ -87,6 +88,9 @@ Bug Fixes
8788
Conversion
8889
^^^^^^^^^^
8990

91+
- Bug in assignment against datetime-like data with ``int`` may be incorrectly converted to datetime-like (:issue:`14145`)
92+
- Bug in assignment against ``int64`` data with an ``np.ndarray`` of ``float64`` dtype may incorrectly keep ``int64`` dtype (:issue:`14001`)
93+
9094

9195

9296
Indexing

‎pandas/core/dtypes/cast.py

Lines changed: 20 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -272,7 +272,7 @@ def maybe_promote(dtype, fill_value=np.nan):
272272
else:
273273
if issubclass(dtype.type, np.datetime64):
274274
try:
275-
fill_value = lib.Timestamp(fill_value).value
275+
fill_value = Timestamp(fill_value).value
276276
except:
277277
# the proper thing to do here would probably be to upcast
278278
# to object (but numpy 1.6.1 doesn't do this properly)
@@ -349,9 +349,9 @@ def infer_dtype_from_scalar(val, pandas_dtype=False):
349349

350350
# a 1-element ndarray
351351
if isinstance(val, np.ndarray):
352+
msg = "invalid ndarray passed to _infer_dtype_from_scalar"
352353
if val.ndim != 0:
353-
raise ValueError(
354-
"invalid ndarray passed to _infer_dtype_from_scalar")
354+
raise ValueError(msg)
355355

356356
dtype = val.dtype
357357
val = val.item()
@@ -552,7 +552,7 @@ def conv(r, dtype):
552552
if isnull(r):
553553
pass
554554
elif dtype == _NS_DTYPE:
555-
r = lib.Timestamp(r)
555+
r = Timestamp(r)
556556
elif dtype == _TD_DTYPE:
557557
r = _coerce_scalar_to_timedelta_type(r)
558558
elif dtype == np.bool_:
@@ -1026,3 +1026,19 @@ def find_common_type(types):
10261026
return np.object
10271027

10281028
return np.find_common_type(types, [])
1029+
1030+
1031+
def _cast_scalar_to_array(shape, value, dtype=None):
1032+
"""
1033+
create np.ndarray of specified shape and dtype, filled with values
1034+
"""
1035+
1036+
if dtype is None:
1037+
dtype, fill_value = _infer_dtype_from_scalar(value)
1038+
else:
1039+
fill_value = value
1040+
1041+
values = np.empty(shape, dtype=dtype)
1042+
values.fill(fill_value)
1043+
1044+
return values

‎pandas/core/frame.py

Lines changed: 8 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -59,6 +59,7 @@
5959
is_named_tuple)
6060
from pandas.core.dtypes.missing import isnull, notnull
6161

62+
6263
from pandas.core.common import (_try_sort,
6364
_default_index,
6465
_values_from_object,
@@ -355,15 +356,10 @@ def __init__(self, data=None, index=None, columns=None, dtype=None,
355356
raise_with_traceback(exc)
356357

357358
if arr.ndim == 0 and index is not None and columns is not None:
358-
if isinstance(data, compat.string_types) and dtype is None:
359-
dtype = np.object_
360-
if dtype is None:
361-
dtype, data = infer_dtype_from_scalar(data)
362-
363-
values = np.empty((len(index), len(columns)), dtype=dtype)
364-
values.fill(data)
365-
mgr = self._init_ndarray(values, index, columns, dtype=dtype,
366-
copy=False)
359+
values = _cast_scalar_to_array((len(index), len(columns)),
360+
data, dtype=dtype)
361+
mgr = self._init_ndarray(values, index, columns,
362+
dtype=values.dtype, copy=False)
367363
else:
368364
raise ValueError('DataFrame constructor not properly called!')
369365

@@ -477,7 +473,7 @@ def _get_axes(N, K, index=index, columns=columns):
477473
values = _prep_ndarray(values, copy=copy)
478474

479475
if dtype is not None:
480-
if values.dtype != dtype:
476+
if not is_dtype_equal(values.dtype, dtype):
481477
try:
482478
values = values.astype(dtype)
483479
except Exception as orig:
@@ -2653,9 +2649,8 @@ def reindexer(value):
26532649

26542650
else:
26552651
# upcast the scalar
2656-
dtype, value = infer_dtype_from_scalar(value)
2657-
value = np.repeat(value, len(self.index)).astype(dtype)
2658-
value = maybe_cast_to_datetime(value, dtype)
2652+
value = _cast_scalar_to_array(len(self.index), value)
2653+
value = _possibly_cast_to_datetime(value, value.dtype)
26592654

26602655
# return internal types directly
26612656
if is_extension_type(value):

‎pandas/core/internals.py

Lines changed: 88 additions & 32 deletions
Original file line numberDiff line numberDiff line change
@@ -388,7 +388,8 @@ def fillna(self, value, limit=None, inplace=False, downcast=None,
388388

389389
# fillna, but if we cannot coerce, then try again as an ObjectBlock
390390
try:
391-
values, _, value, _ = self._try_coerce_args(self.values, value)
391+
values, _, _, _ = self._try_coerce_args(self.values, value)
392+
# value may be converted to internal, thus drop
392393
blocks = self.putmask(mask, value, inplace=inplace)
393394
blocks = [b.make_block(values=self._try_coerce_result(b.values))
394395
for b in blocks]
@@ -682,8 +683,43 @@ def setitem(self, indexer, value, mgr=None):
682683
if self.is_numeric:
683684
value = np.nan
684685

685-
# coerce args
686-
values, _, value, _ = self._try_coerce_args(self.values, value)
686+
# coerce if block dtype can store value
687+
values = self.values
688+
try:
689+
values, _, value, _ = self._try_coerce_args(values, value)
690+
# can keep its own dtype
691+
if hasattr(value, 'dtype') and is_dtype_equal(values.dtype,
692+
value.dtype):
693+
dtype = self.dtype
694+
else:
695+
dtype = 'infer'
696+
697+
except (TypeError, ValueError):
698+
# current dtype cannot store value, coerce to common dtype
699+
find_dtype = False
700+
701+
if hasattr(value, 'dtype'):
702+
dtype = value.dtype
703+
find_dtype = True
704+
705+
elif is_scalar(value):
706+
if isnull(value):
707+
# NaN promotion is handled in latter path
708+
dtype = False
709+
else:
710+
dtype, _ = _infer_dtype_from_scalar(value,
711+
pandas_dtype=True)
712+
find_dtype = True
713+
else:
714+
dtype = 'infer'
715+
716+
if find_dtype:
717+
dtype = _find_common_type([values.dtype, dtype])
718+
if not is_dtype_equal(self.dtype, dtype):
719+
b = self.astype(dtype)
720+
return b.setitem(indexer, value, mgr=mgr)
721+
722+
# value must be storeable at this moment
687723
arr_value = np.array(value)
688724

689725
# cast the values to a type that can hold nan (if necessary)
@@ -713,19 +749,8 @@ def setitem(self, indexer, value, mgr=None):
713749
raise ValueError("cannot set using a slice indexer with a "
714750
"different length than the value")
715751

716-
try:
717-
718-
def _is_scalar_indexer(indexer):
719-
# return True if we are all scalar indexers
720-
721-
if arr_value.ndim == 1:
722-
if not isinstance(indexer, tuple):
723-
indexer = tuple([indexer])
724-
return all([is_scalar(idx) for idx in indexer])
725-
return False
726-
727-
def _is_empty_indexer(indexer):
728-
# return a boolean if we have an empty indexer
752+
def _is_scalar_indexer(indexer):
753+
# return True if we are all scalar indexers
729754

730755
if arr_value.ndim == 1:
731756
if not isinstance(indexer, tuple):
@@ -777,23 +802,43 @@ def _is_empty_indexer(indexer):
777802
raise
778803
except TypeError:
779804

780-
# cast to the passed dtype if possible
781-
# otherwise raise the original error
782-
try:
783-
# e.g. we are uint32 and our value is uint64
784-
# this is for compat with older numpies
785-
block = self.make_block(transf(values.astype(value.dtype)))
786-
return block.setitem(indexer=indexer, value=value, mgr=mgr)
805+
def _is_empty_indexer(indexer):
806+
# return a boolean if we have an empty indexer
787807

788-
except:
789-
pass
790-
791-
raise
808+
if arr_value.ndim == 1:
809+
if not isinstance(indexer, tuple):
810+
indexer = tuple([indexer])
811+
return any(isinstance(idx, np.ndarray) and len(idx) == 0
812+
for idx in indexer)
813+
return False
792814

793-
except Exception:
815+
# empty indexers
816+
# 8669 (empty)
817+
if _is_empty_indexer(indexer):
794818
pass
795819

796-
return [self]
820+
# setting a single element for each dim and with a rhs that could
821+
# be say a list
822+
# GH 6043
823+
elif _is_scalar_indexer(indexer):
824+
values[indexer] = value
825+
826+
# if we are an exact match (ex-broadcasting),
827+
# then use the resultant dtype
828+
elif (len(arr_value.shape) and
829+
arr_value.shape[0] == values.shape[0] and
830+
np.prod(arr_value.shape) == np.prod(values.shape)):
831+
values[indexer] = value
832+
values = values.astype(arr_value.dtype)
833+
834+
# set
835+
else:
836+
values[indexer] = value
837+
838+
# coerce and try to infer the dtypes of the result
839+
values = self._try_coerce_and_cast_result(values, dtype)
840+
block = self.make_block(transf(values), fastpath=True)
841+
return block
797842

798843
def putmask(self, mask, new, align=True, inplace=False, axis=0,
799844
transpose=False, mgr=None):
@@ -1264,6 +1309,7 @@ def func(cond, values, other):
12641309

12651310
values, values_mask, other, other_mask = self._try_coerce_args(
12661311
values, other)
1312+
12671313
try:
12681314
return self._try_coerce_result(expressions.where(
12691315
cond, values, other, raise_on_error=True))
@@ -1543,6 +1589,7 @@ def putmask(self, mask, new, align=True, inplace=False, axis=0,
15431589
new = new[mask]
15441590

15451591
mask = _safe_reshape(mask, new_values.shape)
1592+
15461593
new_values[mask] = new
15471594
new_values = self._try_coerce_result(new_values)
15481595
return [self.make_block(values=new_values)]
@@ -1712,7 +1759,7 @@ def fillna(self, value, **kwargs):
17121759

17131760
# allow filling with integers to be
17141761
# interpreted as seconds
1715-
if not isinstance(value, np.timedelta64) and is_integer(value):
1762+
if not isinstance(value, np.timedelta64):
17161763
value = Timedelta(value, unit='s')
17171764
return super(TimeDeltaBlock, self).fillna(value, **kwargs)
17181765

@@ -1949,6 +1996,15 @@ def _maybe_downcast(self, blocks, downcast=None):
19491996
def _can_hold_element(self, element):
19501997
return True
19511998

1999+
def _try_coerce_args(self, values, other):
2000+
""" provide coercion to our input arguments """
2001+
2002+
if isinstance(other, ABCDatetimeIndex):
2003+
# to store DatetimeTZBlock as object
2004+
other = other.asobject.values
2005+
2006+
return values, False, other, False
2007+
19522008
def _try_cast(self, element):
19532009
return element
19542010

@@ -2288,8 +2344,6 @@ def _try_coerce_args(self, values, other):
22882344
"naive Block")
22892345
other_mask = isnull(other)
22902346
other = other.asm8.view('i8')
2291-
elif hasattr(other, 'dtype') and is_integer_dtype(other):
2292-
other = other.view('i8')
22932347
else:
22942348
try:
22952349
other = np.asarray(other)
@@ -2466,6 +2520,8 @@ def _try_coerce_args(self, values, other):
24662520
raise ValueError("incompatible or non tz-aware value")
24672521
other_mask = isnull(other)
24682522
other = other.value
2523+
else:
2524+
raise TypeError
24692525

24702526
return values, values_mask, other, other_mask
24712527

‎pandas/core/panel.py

Lines changed: 4 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -178,11 +178,9 @@ def _init_data(self, data, copy, dtype, **kwargs):
178178
copy = False
179179
dtype = None
180180
elif is_scalar(data) and all(x is not None for x in passed_axes):
181-
if dtype is None:
182-
dtype, data = infer_dtype_from_scalar(data)
183-
values = np.empty([len(x) for x in passed_axes], dtype=dtype)
184-
values.fill(data)
185-
mgr = self._init_matrix(values, passed_axes, dtype=dtype,
181+
values = _cast_scalar_to_array([len(x) for x in passed_axes],
182+
data, dtype=dtype)
183+
mgr = self._init_matrix(values, passed_axes, dtype=values.dtype,
186184
copy=False)
187185
copy = False
188186
else: # pragma: no cover
@@ -582,9 +580,7 @@ def __setitem__(self, key, value):
582580
shape[1:], tuple(map(int, value.shape))))
583581
mat = np.asarray(value)
584582
elif is_scalar(value):
585-
dtype, value = infer_dtype_from_scalar(value)
586-
mat = np.empty(shape[1:], dtype=dtype)
587-
mat.fill(value)
583+
mat = _cast_scalar_to_array(shape[1:], value)
588584
else:
589585
raise TypeError('Cannot set item of type: %s' % str(type(value)))
590586

‎pandas/tests/dtypes/test_cast.py

Lines changed: 97 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -332,3 +332,100 @@ def test_period_dtype(self):
332332
np.dtype('datetime64[ns]'), np.object, np.int64]:
333333
assert find_common_type([dtype, dtype2]) == np.object
334334
assert find_common_type([dtype2, dtype]) == np.object
335+
336+
337+
class TestInferDtype2(tm.TestCase):
338+
339+
def test_infer_dtype_from_scalar(self):
340+
# Test that _infer_dtype_from_scalar is returning correct dtype for int
341+
# and float.
342+
343+
for dtypec in [np.uint8, np.int8, np.uint16, np.int16, np.uint32,
344+
np.int32, np.uint64, np.int64]:
345+
data = dtypec(12)
346+
dtype, val = _infer_dtype_from_scalar(data)
347+
self.assertEqual(dtype, type(data))
348+
349+
data = 12
350+
dtype, val = _infer_dtype_from_scalar(data)
351+
self.assertEqual(dtype, np.int64)
352+
353+
for dtypec in [np.float16, np.float32, np.float64]:
354+
data = dtypec(12)
355+
dtype, val = _infer_dtype_from_scalar(data)
356+
self.assertEqual(dtype, dtypec)
357+
358+
data = np.float(12)
359+
dtype, val = _infer_dtype_from_scalar(data)
360+
self.assertEqual(dtype, np.float64)
361+
362+
for data in [True, False]:
363+
dtype, val = _infer_dtype_from_scalar(data)
364+
self.assertEqual(dtype, np.bool_)
365+
366+
for data in [np.complex64(1), np.complex128(1)]:
367+
dtype, val = _infer_dtype_from_scalar(data)
368+
self.assertEqual(dtype, np.complex_)
369+
370+
import datetime
371+
for data in [np.datetime64(1, 'ns'), Timestamp(1),
372+
datetime.datetime(2000, 1, 1, 0, 0)]:
373+
dtype, val = _infer_dtype_from_scalar(data)
374+
self.assertEqual(dtype, 'M8[ns]')
375+
376+
for data in [np.timedelta64(1, 'ns'), Timedelta(1),
377+
datetime.timedelta(1)]:
378+
dtype, val = _infer_dtype_from_scalar(data)
379+
self.assertEqual(dtype, 'm8[ns]')
380+
381+
for tz in ['UTC', 'US/Eastern', 'Asia/Tokyo']:
382+
dt = Timestamp(1, tz=tz)
383+
dtype, val = _infer_dtype_from_scalar(dt, pandas_dtype=True)
384+
self.assertEqual(dtype, 'datetime64[ns, {0}]'.format(tz))
385+
self.assertEqual(val, dt.value)
386+
387+
dtype, val = _infer_dtype_from_scalar(dt)
388+
self.assertEqual(dtype, np.object_)
389+
self.assertEqual(val, dt)
390+
391+
for freq in ['M', 'D']:
392+
p = Period('2011-01-01', freq=freq)
393+
dtype, val = _infer_dtype_from_scalar(p, pandas_dtype=True)
394+
self.assertEqual(dtype, 'period[{0}]'.format(freq))
395+
self.assertEqual(val, p.ordinal)
396+
397+
dtype, val = _infer_dtype_from_scalar(p)
398+
self.assertEqual(dtype, np.object_)
399+
self.assertEqual(val, p)
400+
401+
for data in [datetime.date(2000, 1, 1), 'foo']:
402+
dtype, val = _infer_dtype_from_scalar(data)
403+
self.assertEqual(dtype, np.object_)
404+
405+
def test_cast_scalar_to_array(self):
406+
arr = _cast_scalar_to_array((3, 2), 1, dtype=np.int64)
407+
exp = np.ones((3, 2), dtype=np.int64)
408+
tm.assert_numpy_array_equal(arr, exp)
409+
410+
arr = _cast_scalar_to_array((3, 2), 1.1)
411+
exp = np.empty((3, 2), dtype=np.float64)
412+
exp.fill(1.1)
413+
tm.assert_numpy_array_equal(arr, exp)
414+
415+
arr = _cast_scalar_to_array((2, 3), Timestamp('2011-01-01'))
416+
exp = np.empty((2, 3), dtype='datetime64[ns]')
417+
exp.fill(np.datetime64('2011-01-01'))
418+
tm.assert_numpy_array_equal(arr, exp)
419+
420+
# pandas dtype is stored as object dtype
421+
obj = Timestamp('2011-01-01', tz='US/Eastern')
422+
arr = _cast_scalar_to_array((2, 3), obj)
423+
exp = np.empty((2, 3), dtype=np.object)
424+
exp.fill(obj)
425+
tm.assert_numpy_array_equal(arr, exp)
426+
427+
obj = Period('2011-01-01', freq='D')
428+
arr = _cast_scalar_to_array((2, 3), obj)
429+
exp = np.empty((2, 3), dtype=np.object)
430+
exp.fill(obj)
431+
tm.assert_numpy_array_equal(arr, exp)

‎pandas/tests/dtypes/test_convert.py

Whitespace-only changes.

‎pandas/tests/indexing/test_coercion.py

Lines changed: 71 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -105,6 +105,19 @@ def test_setitem_series_int64(self):
105105
exp = pd.Series([1, 1, 3, 4])
106106
self._assert_setitem_series_conversion(obj, True, exp, np.int64)
107107

108+
def test_setitem_series_int8(self):
109+
# integer dtype coercion (no change)
110+
obj = pd.Series([1, 2, 3, 4], dtype=np.int8)
111+
self.assertEqual(obj.dtype, np.int8)
112+
113+
exp = pd.Series([1, 1, 3, 4], dtype=np.int8)
114+
self._assert_setitem_series_conversion(obj, np.int32(1), exp, np.int8)
115+
116+
# BUG: it must be Series([1, 1, 3, 4], dtype=np.int16)
117+
exp = pd.Series([1, 0, 3, 4], dtype=np.int8)
118+
self._assert_setitem_series_conversion(obj, np.int16(2**9), exp,
119+
np.int8)
120+
108121
def test_setitem_series_float64(self):
109122
obj = pd.Series([1.1, 2.2, 3.3, 4.4])
110123
assert obj.dtype == np.float64
@@ -205,6 +218,13 @@ def test_setitem_series_datetime64(self):
205218
pd.Timestamp('2011-01-04')])
206219
self._assert_setitem_series_conversion(obj, 1, exp, 'datetime64[ns]')
207220

221+
# datetime64 + object -> object
222+
exp = pd.Series([pd.Timestamp('2011-01-01'),
223+
'x',
224+
pd.Timestamp('2011-01-03'),
225+
pd.Timestamp('2011-01-04')])
226+
self._assert_setitem_series_conversion(obj, 'x', exp, np.object)
227+
208228
# ToDo: add more tests once the above issue has been fixed
209229

210230
def test_setitem_series_datetime64tz(self):
@@ -224,19 +244,62 @@ def test_setitem_series_datetime64tz(self):
224244
self._assert_setitem_series_conversion(obj, value, exp,
225245
'datetime64[ns, US/Eastern]')
226246

247+
# datetime64tz + datetime64tz (different tz) -> object
248+
exp = pd.Series([pd.Timestamp('2011-01-01', tz=tz),
249+
pd.Timestamp('2012-01-01', tz='US/Pacific'),
250+
pd.Timestamp('2011-01-03', tz=tz),
251+
pd.Timestamp('2011-01-04', tz=tz)])
252+
value = pd.Timestamp('2012-01-01', tz='US/Pacific')
253+
self._assert_setitem_series_conversion(obj, value, exp, np.object)
254+
255+
# datetime64tz + datetime64 -> object
256+
exp = pd.Series([pd.Timestamp('2011-01-01', tz=tz),
257+
pd.Timestamp('2012-01-01'),
258+
pd.Timestamp('2011-01-03', tz=tz),
259+
pd.Timestamp('2011-01-04', tz=tz)])
260+
value = pd.Timestamp('2012-01-01')
261+
self._assert_setitem_series_conversion(obj, value, exp, np.object)
262+
227263
# datetime64 + int -> object
228-
# ToDo: The result must be object
229264
exp = pd.Series([pd.Timestamp('2011-01-01', tz=tz),
230-
pd.Timestamp(1, tz=tz),
265+
1,
231266
pd.Timestamp('2011-01-03', tz=tz),
232267
pd.Timestamp('2011-01-04', tz=tz)])
233-
self._assert_setitem_series_conversion(obj, 1, exp,
234-
'datetime64[ns, US/Eastern]')
268+
self._assert_setitem_series_conversion(obj, 1, exp, np.object)
235269

236270
# ToDo: add more tests once the above issue has been fixed
237271

238272
def test_setitem_series_timedelta64(self):
239-
pass
273+
obj = pd.Series([pd.Timedelta('1 day'),
274+
pd.Timedelta('2 day'),
275+
pd.Timedelta('3 day'),
276+
pd.Timedelta('4 day')])
277+
self.assertEqual(obj.dtype, 'timedelta64[ns]')
278+
279+
# timedelta64 + timedelta64 -> timedelta64
280+
exp = pd.Series([pd.Timedelta('1 day'),
281+
pd.Timedelta('12 day'),
282+
pd.Timedelta('3 day'),
283+
pd.Timedelta('4 day')])
284+
self._assert_setitem_series_conversion(obj, pd.Timedelta('12 day'),
285+
exp, 'timedelta64[ns]')
286+
287+
# timedelta64 + int -> object
288+
# ToDo: The result must be object
289+
exp = pd.Series([pd.Timedelta('1 day'),
290+
pd.Timedelta(1),
291+
pd.Timedelta('3 day'),
292+
pd.Timedelta('4 day')])
293+
self._assert_setitem_series_conversion(obj, 1, exp, 'timedelta64[ns]')
294+
295+
# timedelta64 + object -> object
296+
exp = pd.Series([pd.Timedelta('1 day'),
297+
'x',
298+
pd.Timedelta('3 day'),
299+
pd.Timedelta('4 day')])
300+
self._assert_setitem_series_conversion(obj, 'x', exp, np.object)
301+
302+
# ToDo: add more tests once the above issue has been fixed
240303

241304
def test_setitem_series_period(self):
242305
pass
@@ -1033,14 +1096,12 @@ def test_fillna_series_datetime64tz(self):
10331096
value = pd.Timestamp('2012-01-01', tz='Asia/Tokyo')
10341097
self._assert_fillna_conversion(obj, value, exp, np.object)
10351098

1036-
# datetime64tz + int => datetime64tz
1037-
# ToDo: must be object
1099+
# datetime64tz + int => object
10381100
exp = pd.Series([pd.Timestamp('2011-01-01', tz=tz),
1039-
pd.Timestamp(1, tz=tz),
1101+
1,
10401102
pd.Timestamp('2011-01-03', tz=tz),
10411103
pd.Timestamp('2011-01-04', tz=tz)])
1042-
self._assert_fillna_conversion(obj, 1, exp,
1043-
'datetime64[ns, US/Eastern]')
1104+
self._assert_fillna_conversion(obj, 1, exp, np.object)
10441105

10451106
# datetime64tz + object => object
10461107
exp = pd.Series([pd.Timestamp('2011-01-01', tz=tz),

‎pandas/tests/indexing/test_indexing.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -354,6 +354,12 @@ def test_multi_assign(self):
354354
tm.assert_frame_equal(df2, expected)
355355

356356
# with an ndarray on rhs
357+
# coerces to float64 because values has float64 dtype
358+
# GH 14001
359+
expected = DataFrame({'FC': ['a', np.nan, 'a', 'b', 'a', 'b'],
360+
'PF': [0, 0, 0, 0, 1, 1],
361+
'col1': [0., 1., 4., 6., 8., 10.],
362+
'col2': [12, 7, 16, np.nan, 20, 22]})
357363
df2 = df.copy()
358364
df2.loc[mask, cols] = dft.loc[mask, cols].values
359365
tm.assert_frame_equal(df2, expected)

‎pandas/tests/series/test_analytics.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1021,11 +1021,11 @@ def test_clip_with_datetimes(self):
10211021
# naive and tz-aware datetimes
10221022

10231023
t = Timestamp('2015-12-01 09:30:30')
1024-
s = Series([Timestamp('2015-12-01 09:30:00'), Timestamp(
1025-
'2015-12-01 09:31:00')])
1024+
s = Series([Timestamp('2015-12-01 09:30:00'),
1025+
Timestamp('2015-12-01 09:31:00')])
10261026
result = s.clip(upper=t)
1027-
expected = Series([Timestamp('2015-12-01 09:30:00'), Timestamp(
1028-
'2015-12-01 09:30:30')])
1027+
expected = Series([Timestamp('2015-12-01 09:30:00'),
1028+
Timestamp('2015-12-01 09:30:30')])
10291029
assert_series_equal(result, expected)
10301030

10311031
t = Timestamp('2015-12-01 09:30:30', tz='US/Eastern')

‎pandas/tests/series/test_missing.py

Lines changed: 7 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -52,14 +52,14 @@ class TestSeriesMissingData(TestData):
5252

5353
def test_timedelta_fillna(self):
5454
# GH 3371
55-
s = Series([Timestamp('20130101'), Timestamp('20130101'), Timestamp(
56-
'20130102'), Timestamp('20130103 9:01:01')])
55+
s = Series([Timestamp('20130101'), Timestamp('20130101'),
56+
Timestamp('20130102'), Timestamp('20130103 9:01:01')])
5757
td = s.diff()
5858

5959
# reg fillna
6060
result = td.fillna(0)
61-
expected = Series([timedelta(0), timedelta(0), timedelta(1), timedelta(
62-
days=1, seconds=9 * 3600 + 60 + 1)])
61+
expected = Series([timedelta(0), timedelta(0), timedelta(1),
62+
timedelta(days=1, seconds=9 * 3600 + 60 + 1)])
6363
assert_series_equal(result, expected)
6464

6565
# interprested as seconds
@@ -69,8 +69,9 @@ def test_timedelta_fillna(self):
6969
assert_series_equal(result, expected)
7070

7171
result = td.fillna(timedelta(days=1, seconds=1))
72-
expected = Series([timedelta(days=1, seconds=1), timedelta(
73-
0), timedelta(1), timedelta(days=1, seconds=9 * 3600 + 60 + 1)])
72+
expected = Series([timedelta(days=1, seconds=1), timedelta(0),
73+
timedelta(1),
74+
timedelta(days=1, seconds=9 * 3600 + 60 + 1)])
7475
assert_series_equal(result, expected)
7576

7677
result = td.fillna(np.timedelta64(int(1e9)))

0 commit comments

Comments
 (0)
Please sign in to comment.