@@ -1204,14 +1204,14 @@ def test_resample_anchored_multiday(label, sec):
     assert result.index[-1] == Timestamp(f"2014-10-15 23:00:{sec}00")
 
 
-def test_corner_cases():
+def test_corner_cases(unit):
     # miscellaneous test coverage
 
-    rng = date_range("1/1/2000", periods=12, freq="t")
+    rng = date_range("1/1/2000", periods=12, freq="t").as_unit(unit)
     ts = Series(np.random.randn(len(rng)), index=rng)
 
     result = ts.resample("5t", closed="right", label="left").mean()
-    ex_index = date_range("1999-12-31 23:55", periods=4, freq="5t")
+    ex_index = date_range("1999-12-31 23:55", periods=4, freq="5t").as_unit(unit)
     tm.assert_index_equal(result.index, ex_index)
 
 
@@ -1223,33 +1223,34 @@ def test_corner_cases_period(simple_period_range_series):
     assert len(result) == 0
 
 
-def test_corner_cases_date(simple_date_range_series):
+def test_corner_cases_date(simple_date_range_series, unit):
     # resample to periods
     ts = simple_date_range_series("2000-04-28", "2000-04-30 11:00", freq="h")
+    ts.index = ts.index.as_unit(unit)
     result = ts.resample("M", kind="period").mean()
     assert len(result) == 1
     assert result.index[0] == Period("2000-04", freq="M")
 
 
-def test_anchored_lowercase_buglet():
-    dates = date_range("4/16/2012 20:00", periods=50000, freq="s")
+def test_anchored_lowercase_buglet(unit):
+    dates = date_range("4/16/2012 20:00", periods=50000, freq="s").as_unit(unit)
     ts = Series(np.random.randn(len(dates)), index=dates)
     # it works!
     ts.resample("d").mean()
 
 
-def test_upsample_apply_functions():
+def test_upsample_apply_functions(unit):
     # #1596
-    rng = date_range("2012-06-12", periods=4, freq="h")
+    rng = date_range("2012-06-12", periods=4, freq="h").as_unit(unit)
 
     ts = Series(np.random.randn(len(rng)), index=rng)
 
     result = ts.resample("20min").aggregate(["mean", "sum"])
     assert isinstance(result, DataFrame)
 
 
-def test_resample_not_monotonic():
-    rng = date_range("2012-06-12", periods=200, freq="h")
+def test_resample_not_monotonic(unit):
+    rng = date_range("2012-06-12", periods=200, freq="h").as_unit(unit)
     ts = Series(np.random.randn(len(rng)), index=rng)
 
     ts = ts.take(np.random.permutation(len(ts)))
@@ -1401,7 +1402,7 @@ def test_resample_timegrouper(dates):
     tm.assert_frame_equal(result, expected)
 
 
-def test_resample_nunique():
+def test_resample_nunique(unit):
 
     # GH 12352
     df = DataFrame(
@@ -1416,6 +1417,7 @@ def test_resample_nunique():
             },
         }
     )
+    df.index = df.index.as_unit(unit)
     r = df.resample("D")
     g = df.groupby(Grouper(freq="D"))
     expected = df.groupby(Grouper(freq="D")).ID.apply(lambda x: x.nunique())
@@ -1432,9 +1434,10 @@ def test_resample_nunique():
     tm.assert_series_equal(result, expected)
 
 
-def test_resample_nunique_preserves_column_level_names():
+def test_resample_nunique_preserves_column_level_names(unit):
     # see gh-23222
     df = tm.makeTimeDataFrame(freq="1D").abs()
+    df.index = df.index.as_unit(unit)
     df.columns = pd.MultiIndex.from_arrays(
         [df.columns.tolist()] * 2, names=["lev0", "lev1"]
     )
@@ -1540,17 +1543,19 @@ def test_resample_across_dst():
     tm.assert_frame_equal(result, expected)
 
 
-def test_groupby_with_dst_time_change():
+def test_groupby_with_dst_time_change(unit):
     # GH 24972
-    index = DatetimeIndex(
-        [1478064900001000000, 1480037118776792000], tz="UTC"
-    ).tz_convert("America/Chicago")
+    index = (
+        DatetimeIndex([1478064900001000000, 1480037118776792000], tz="UTC")
+        .tz_convert("America/Chicago")
+        .as_unit(unit)
+    )
 
     df = DataFrame([1, 2], index=index)
     result = df.groupby(Grouper(freq="1d")).last()
     expected_index_values = date_range(
         "2016-11-02", "2016-11-24", freq="d", tz="America/Chicago"
-    )
+    ).as_unit(unit)
 
     index = DatetimeIndex(expected_index_values)
     expected = DataFrame([1.0] + ([np.nan] * 21) + [2.0], index=index)
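
The `unit` fixture these tests now take is defined elsewhere in the pandas test suite and is not part of these hunks. Below is a minimal sketch, assuming the fixture simply parametrizes over the datetime64 resolutions supported since pandas 2.0 so that each test runs once per resolution; the trailing test function and its name are hypothetical (not from this PR) and only illustrate the behaviour the hunks above assert, namely that .as_unit changes only the index resolution and that resampling preserves it.

import numpy as np
import pandas as pd
import pytest


@pytest.fixture(params=["s", "ms", "us", "ns"])
def unit(request):
    # Candidate datetime64 resolution, e.g. "ms" -> datetime64[ms]
    return request.param


def test_resample_preserves_unit(unit):
    # Hypothetical illustration: the resampled index keeps the resolution of
    # the original index (as asserted via .as_unit(unit) in the hunks above).
    rng = pd.date_range("2000-01-01", periods=12, freq="min").as_unit(unit)
    ts = pd.Series(np.arange(12.0), index=rng)
    result = ts.resample("5min").mean()
    assert result.index.dtype == f"datetime64[{unit}]"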