Commit 7a1dad1

ShaharNaveh authored and WillAyd committed
CLN/STY: Some stuff that got the attention of my eye (#31076)
1 parent 7eafd73 commit 7a1dad1

5 files changed: +74 -55 lines

pandas/_libs/hashtable.pyx

+40 -20
@@ -13,26 +13,45 @@ cnp.import_array()
 cdef extern from "numpy/npy_math.h":
     float64_t NAN "NPY_NAN"

-
 from pandas._libs.khash cimport (
     khiter_t,
-
-    kh_str_t, kh_init_str, kh_put_str, kh_exist_str,
-    kh_get_str, kh_destroy_str, kh_resize_str,
-
-    kh_put_strbox, kh_get_strbox, kh_init_strbox,
-
-    kh_int64_t, kh_init_int64, kh_resize_int64, kh_destroy_int64,
-    kh_get_int64, kh_exist_int64, kh_put_int64,
-
-    kh_float64_t, kh_exist_float64, kh_put_float64, kh_init_float64,
-    kh_get_float64, kh_destroy_float64, kh_resize_float64,
-
-    kh_resize_uint64, kh_exist_uint64, kh_destroy_uint64, kh_put_uint64,
-    kh_get_uint64, kh_init_uint64,
-
-    kh_destroy_pymap, kh_exist_pymap, kh_init_pymap, kh_get_pymap,
-    kh_put_pymap, kh_resize_pymap)
+    kh_str_t,
+    kh_init_str,
+    kh_put_str,
+    kh_exist_str,
+    kh_get_str,
+    kh_destroy_str,
+    kh_resize_str,
+    kh_put_strbox,
+    kh_get_strbox,
+    kh_init_strbox,
+    kh_int64_t,
+    kh_init_int64,
+    kh_resize_int64,
+    kh_destroy_int64,
+    kh_get_int64,
+    kh_exist_int64,
+    kh_put_int64,
+    kh_float64_t,
+    kh_exist_float64,
+    kh_put_float64,
+    kh_init_float64,
+    kh_get_float64,
+    kh_destroy_float64,
+    kh_resize_float64,
+    kh_resize_uint64,
+    kh_exist_uint64,
+    kh_destroy_uint64,
+    kh_put_uint64,
+    kh_get_uint64,
+    kh_init_uint64,
+    kh_destroy_pymap,
+    kh_exist_pymap,
+    kh_init_pymap,
+    kh_get_pymap,
+    kh_put_pymap,
+    kh_resize_pymap,
+)


 cimport pandas._libs.util as util

@@ -63,8 +82,9 @@ cdef class Factorizer:
     def get_count(self):
         return self.count

-    def factorize(self, ndarray[object] values, sort=False, na_sentinel=-1,
-                  na_value=None):
+    def factorize(
+        self, ndarray[object] values, sort=False, na_sentinel=-1, na_value=None
+    ):
         """
         Factorize values with nans replaced by na_sentinel
         >>> factorize(np.array([1,2,np.nan], dtype='O'), na_sentinel=20)
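
For readers skimming the hunk above: the docstring example relies on na_sentinel, the code assigned to missing values instead of a label of their own. The same behaviour is visible through the public pandas.factorize API; a minimal sketch (the input array is illustrative, and -1 is the default sentinel):

    import numpy as np
    import pandas as pd

    values = np.array([1, 2, np.nan, 1], dtype="O")
    codes, uniques = pd.factorize(values)  # missing values get the sentinel code
    print(codes)    # [ 0  1 -1  0]
    print(uniques)  # [1 2]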

pandas/_libs/window/aggregations.pyx

+9 -4
@@ -56,8 +56,9 @@ cdef:
 cdef inline int int_max(int a, int b): return a if a >= b else b
 cdef inline int int_min(int a, int b): return a if a <= b else b

-cdef inline bint is_monotonic_start_end_bounds(ndarray[int64_t, ndim=1] start,
-                                               ndarray[int64_t, ndim=1] end):
+cdef inline bint is_monotonic_start_end_bounds(
+    ndarray[int64_t, ndim=1] start, ndarray[int64_t, ndim=1] end
+):
     return is_monotonic(start, False)[0] and is_monotonic(end, False)[0]

 # Cython implementations of rolling sum, mean, variance, skewness,

@@ -90,8 +91,12 @@ cdef inline bint is_monotonic_start_end_bounds(ndarray[int64_t, ndim=1] start,
 # this is only an impl for index not None, IOW, freq aware


-def roll_count(ndarray[float64_t] values, ndarray[int64_t] start, ndarray[int64_t] end,
-               int64_t minp):
+def roll_count(
+    ndarray[float64_t] values,
+    ndarray[int64_t] start,
+    ndarray[int64_t] end,
+    int64_t minp,
+):
     cdef:
         float64_t val, count_x = 0.0
         int64_t s, e, nobs, N = len(values)
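
Context for the two signatures reformatted above, as a rough pure-NumPy sketch rather than the Cython implementation (which updates its counts incrementally and may handle edge cases slightly differently): each start[i]/end[i] pair delimits one window over values, is_monotonic_start_end_bounds checks that both bound arrays never decrease, and roll_count reports the number of non-NaN entries per window, or NaN when that number falls below minp.

    import numpy as np

    def is_monotonic_bounds(start, end):
        # non-decreasing start and end arrays, as the Cython helper's name suggests
        return bool(np.all(np.diff(start) >= 0) and np.all(np.diff(end) >= 0))

    def roll_count_sketch(values, start, end, minp):
        out = np.empty(len(start))
        for i, (s, e) in enumerate(zip(start, end)):
            nobs = np.count_nonzero(~np.isnan(values[s:e]))  # observations in window
            out[i] = nobs if nobs >= minp else np.nan
        return out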

pandas/io/sas/sas.pyx

+22 -28
@@ -13,8 +13,7 @@ ctypedef unsigned short uint16_t
 # algorithm. It is partially documented here:
 #
 # https://cran.r-project.org/package=sas7bdat/vignettes/sas7bdat.pdf
-cdef const uint8_t[:] rle_decompress(int result_length,
-                                     const uint8_t[:] inbuff):
+cdef const uint8_t[:] rle_decompress(int result_length, const uint8_t[:] inbuff):

     cdef:
         uint8_t control_byte, x

@@ -117,8 +116,7 @@ cdef const uint8_t[:] rle_decompress(int result_length,
 # rdc_decompress decompresses data using the Ross Data Compression algorithm:
 #
 # http://collaboration.cmc.ec.gc.ca/science/rpn/biblio/ddj/Website/articles/CUJ/1992/9210/ross/ross.htm
-cdef const uint8_t[:] rdc_decompress(int result_length,
-                                     const uint8_t[:] inbuff):
+cdef const uint8_t[:] rdc_decompress(int result_length, const uint8_t[:] inbuff):

     cdef:
         uint8_t cmd

@@ -233,8 +231,7 @@ cdef class Parser:
         int subheader_pointer_length
         int current_page_type
         bint is_little_endian
-        const uint8_t[:] (*decompress)(int result_length,
-                                       const uint8_t[:] inbuff)
+        const uint8_t[:] (*decompress)(int result_length, const uint8_t[:] inbuff)
         object parser

     def __init__(self, object parser):

@@ -267,9 +264,7 @@ cdef class Parser:
             elif column_types[j] == b's':
                 self.column_types[j] = column_type_string
             else:
-                raise ValueError(
-                    f"unknown column type: {self.parser.columns[j].ctype}"
-                )
+                raise ValueError(f"unknown column type: {self.parser.columns[j].ctype}")

         # compression
         if parser.compression == const.rle_compression:

@@ -296,8 +291,7 @@ cdef class Parser:

         # update the parser
         self.parser._current_row_on_page_index = self.current_row_on_page_index
-        self.parser._current_row_in_chunk_index =\
-            self.current_row_in_chunk_index
+        self.parser._current_row_in_chunk_index = self.current_row_in_chunk_index
         self.parser._current_row_in_file_index = self.current_row_in_file_index

     cdef bint read_next_page(self):

@@ -318,9 +312,9 @@ cdef class Parser:
         self.current_page_type = self.parser._current_page_type
         self.current_page_block_count = self.parser._current_page_block_count
         self.current_page_data_subheader_pointers_len = len(
-            self.parser._current_page_data_subheader_pointers)
-        self.current_page_subheaders_count =\
-            self.parser._current_page_subheaders_count
+            self.parser._current_page_data_subheader_pointers
+        )
+        self.current_page_subheaders_count = self.parser._current_page_subheaders_count

     cdef readline(self):

@@ -358,31 +352,31 @@ cdef class Parser:
             return False
         elif (self.current_page_type == page_mix_types_0 or
               self.current_page_type == page_mix_types_1):
-            align_correction = (bit_offset + subheader_pointers_offset +
-                                self.current_page_subheaders_count *
-                                subheader_pointer_length)
+            align_correction = (
+                bit_offset
+                + subheader_pointers_offset
+                + self.current_page_subheaders_count * subheader_pointer_length
+            )
             align_correction = align_correction % 8
             offset = bit_offset + align_correction
             offset += subheader_pointers_offset
-            offset += (self.current_page_subheaders_count *
-                       subheader_pointer_length)
+            offset += self.current_page_subheaders_count * subheader_pointer_length
             offset += self.current_row_on_page_index * self.row_length
-            self.process_byte_array_with_data(offset,
-                                              self.row_length)
-            mn = min(self.parser.row_count,
-                     self.parser._mix_page_row_count)
+            self.process_byte_array_with_data(offset, self.row_length)
+            mn = min(self.parser.row_count, self.parser._mix_page_row_count)
             if self.current_row_on_page_index == mn:
                 done = self.read_next_page()
                 if done:
                     return True
             return False
         elif self.current_page_type & page_data_type == page_data_type:
             self.process_byte_array_with_data(
-                bit_offset + subheader_pointers_offset +
-                self.current_row_on_page_index * self.row_length,
-                self.row_length)
-            flag = (self.current_row_on_page_index ==
-                    self.current_page_block_count)
+                bit_offset
+                + subheader_pointers_offset
+                + self.current_row_on_page_index * self.row_length,
+                self.row_length,
+            )
+            flag = self.current_row_on_page_index == self.current_page_block_count
             if flag:
                 done = self.read_next_page()
                 if done:
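
The two cdef functions whose signatures are joined onto one line above implement the two sas7bdat page codecs, RLE and Ross Data Compression, neither of which is fully documented outside the linked references. Purely as a conceptual illustration of run-length decoding, and not the SAS control-byte format that rle_decompress actually parses, a toy decoder over (count, byte) pairs might look like this:

    def toy_rle_decompress(result_length: int, inbuff: bytes) -> bytes:
        """Expand (count, value) byte pairs; illustrative only, not the SAS scheme."""
        out = bytearray()
        for i in range(0, len(inbuff), 2):
            count, value = inbuff[i], inbuff[i + 1]
            out.extend(bytes([value]) * count)
        if len(out) != result_length:
            raise ValueError(f"expected {result_length} bytes, got {len(out)}")
        return bytes(out)

    # toy_rle_decompress(5, bytes([3, 0x41, 2, 0x42])) == b"AAABB"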

pandas/tseries/offsets.py

+2 -2
@@ -2667,8 +2667,8 @@ def _delta_to_tick(delta: timedelta) -> Tick:
                 return Second(seconds)
     else:
         nanos = delta_to_nanoseconds(delta)
-        if nanos % 1000000 == 0:
-            return Milli(nanos // 1000000)
+        if nanos % 1_000_000 == 0:
+            return Milli(nanos // 1_000_000)
         elif nanos % 1000 == 0:
             return Micro(nanos // 1000)
         else:  # pragma: no cover
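
The only change in this hunk is switching to PEP 515 underscore digit separators; 1_000_000 and 1000000 are the same integer literal to the interpreter, the underscores only make the magnitude easier to read. A stand-alone sketch of the dispatch performed in the hunk (the input value is illustrative):

    nanos = 1_500_000            # identical to 1500000; underscores are purely visual
    if nanos % 1_000_000 == 0:
        unit, count = "Milli", nanos // 1_000_000
    elif nanos % 1000 == 0:
        unit, count = "Micro", nanos // 1000
    else:
        unit, count = "Nano", nanos
    print(unit, count)           # Micro 1500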

setup.py

+1 -1
@@ -356,7 +356,7 @@ def run(self):
                     sourcefile = pyxfile[:-3] + extension
                     msg = (
                         f"{extension}-source file '{sourcefile}' not found.\n"
-                        f"Run 'setup.py cython' before sdist."
+                        "Run 'setup.py cython' before sdist."
                     )
                     assert os.path.isfile(sourcefile), msg
         sdist_class.run(self)
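
The change above drops an f prefix from a literal that contains no placeholders; the two adjacent string literals are still joined by implicit concatenation, so the assembled message is unchanged. A small stand-alone illustration (the variable values are made up):

    extension = "c"                      # illustrative values only
    sourcefile = "pandas/_libs/lib.c"
    msg = (
        f"{extension}-source file '{sourcefile}' not found.\n"
        "Run 'setup.py cython' before sdist."  # no placeholders, so no f prefix needed
    )
    print(msg)
    # c-source file 'pandas/_libs/lib.c' not found.
    # Run 'setup.py cython' before sdist.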
