Skip to content

Commit abf6a14

Browse files
authored
chore: update requires-python in pyproject.toml (#2080)
1 parent 53a6da4 commit abf6a14

File tree

9 files changed: +26 additions, −11 deletions

benchmarks/bench_batch_attention.py

Lines changed: 1 addition & 1 deletion
@@ -436,7 +436,7 @@ def main(args: argparse.Namespace) -> None:
     records_new = []
     records_separate = []
     for cfg_id, (decode_case, prefill_case) in enumerate(
-        zip(decode_lens, prefill_lens), start=1
+        zip(decode_lens, prefill_lens, strict=True), start=1
     ):
         prefill_kv_lens = [p[0] for p in prefill_case]
         prefill_qo_lens = [p[1] for p in prefill_case]

benchmarks/bench_mixed_attention.py

Lines changed: 1 addition & 1 deletion
@@ -218,7 +218,7 @@ def _run_single_prefill():
     head_dim = 128

     for idx, (p_q_lens, p_kv_lens, d_q_len, d_kv_len) in enumerate(
-        zip(p_q_configs, p_kv_configs, d_q_len_configs, d_kv_len_configs)
+        zip(p_q_configs, p_kv_configs, d_q_len_configs, d_kv_len_configs, strict=True)
     ):
         print(f"===== Benchmark {idx + 1}: (kv_len, qo_len) set =====")
         run_bench(

docs/installation.rst

Lines changed: 1 addition & 1 deletion
@@ -13,7 +13,7 @@ Prerequisites

 - OS: Linux only

-- Python: 3.9, 3.10, 3.11, 3.12, 3.13
+- Python: 3.10, 3.11, 3.12, 3.13, 3.14

 Quick Start
 ^^^^^^^^^^^

flashinfer/autotuner.py

Lines changed: 3 additions & 1 deletion
@@ -648,7 +648,9 @@ def _generate_optimization_profiles(

         opt_shapes_max = {
             v1: v2
-            for v1, v2 in zip(opt_shapes, tuple(opt_shapes[1:]) + (float("inf"),))
+            for v1, v2 in zip(
+                opt_shapes, tuple(opt_shapes[1:]) + (float("inf"),), strict=True
+            )
         }
         dynamic_dims.append(
             (spec.input_idx, spec.dim_idx, opt_shapes_max, opt_shapes)

flashinfer/cascade.py

Lines changed: 3 additions & 1 deletion
@@ -349,6 +349,7 @@ def __init__(
                 paged_kv_indptr_buf_arr,
                 paged_kv_indices_buf_arr,
                 paged_kv_last_page_len_buf_arr,
+                strict=True,
             )
         ]
     else:
@@ -381,7 +382,7 @@ def reset_workspace_buffer(
             be the same as the device of the input tensors.
         """
         for wrapper, int_workspace_buffer in zip(
-            self._batch_prefill_wrappers, int_workspace_buffers
+            self._batch_prefill_wrappers, int_workspace_buffers, strict=True
         ):
             wrapper.reset_workspace_buffer(float_workspace_buffer, int_workspace_buffer)

@@ -479,6 +480,7 @@ def plan(
                 paged_kv_indptr_arr,
                 paged_kv_indices_arr,
                 paged_kv_last_page_len,
+                strict=True,
             )
         ):
             wrapper.plan(

flashinfer/comm/mnnvl.py

Lines changed: 1 addition & 1 deletion
@@ -414,7 +414,7 @@ def open_mnnvl_memory(mapping: Mapping, size: int):
         pidfds.append(pidfd)

     remote_fds = []
-    for pidfd, fd in zip(pidfds, all_handles_data):
+    for pidfd, fd in zip(pidfds, all_handles_data, strict=True):
         remote_fd = syscall(SYS_pidfd_getfd, pidfd, fd, 0)
         if remote_fd < 0:
             err = ctypes.get_errno()

flashinfer/cute_dsl/blockscaled_gemm.py

Lines changed: 2 additions & 0 deletions
@@ -154,6 +154,7 @@ def __new_from_mlir_values__(self, values):
                 self._cluster_shape_mnk,
             ],
             self._values_pos,
+            strict=True,
         ):
             obj_list.append(new_from_mlir_values(obj, values[:n_items]))
             values = values[n_items:]
@@ -348,6 +349,7 @@ def _get_current_work_for_linear_idx(
                 cur_cluster_coord,
                 self.cta_id_in_cluster,
                 (*self.params.cluster_shape_mn, Int32(1)),
+                strict=True,
             )
         )

flashinfer/jit/attention/utils.py

Lines changed: 13 additions & 4 deletions
@@ -30,11 +30,14 @@ def generate_additional_params(
             for dtype, var in zip(
                 additional_tensor_dtypes,
                 additional_tensor_names,
+                strict=True,
             )
         ]
         + [
             f"{dtype} {var};\n"
-            for dtype, var in zip(additional_scalar_dtypes, additional_scalar_names)
+            for dtype, var in zip(
+                additional_scalar_dtypes, additional_scalar_names, strict=True
+            )
         ]
     )
     additional_func_params = "".join(
@@ -48,7 +51,9 @@ def generate_additional_params(
         ]
         + [
             f", {dtype} {var}"
-            for dtype, var in zip(additional_scalar_dtypes, additional_scalar_names)
+            for dtype, var in zip(
+                additional_scalar_dtypes, additional_scalar_names, strict=True
+            )
         ]
     )
     if is_sm90_template:
@@ -59,7 +64,9 @@ def generate_additional_params(
                 if var.startswith("maybe")
                 else f"params.additional_params.{var} = static_cast<{dtype}*>({var}.data_ptr());"
             )
-            for dtype, var in zip(additional_tensor_dtypes, additional_tensor_names)
+            for dtype, var in zip(
+                additional_tensor_dtypes, additional_tensor_names, strict=True
+            )
         ]
         + [
             f"params.additional_params.{var} = {var};"
@@ -74,7 +81,9 @@ def generate_additional_params(
                 if var.startswith("maybe")
                 else f"params.{var} = static_cast<{dtype}*>({var}.data_ptr());"
             )
-            for dtype, var in zip(additional_tensor_dtypes, additional_tensor_names)
+            for dtype, var in zip(
+                additional_tensor_dtypes, additional_tensor_names, strict=True
+            )
         ]
         + [f"params.{var} = {var};" for var in additional_scalar_names]
     )

pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -15,7 +15,7 @@
 [project]
 name = "flashinfer-python"
 description = "FlashInfer: Kernel Library for LLM Serving"
-requires-python = ">=3.9,<4.0"
+requires-python = ">=3.10,<4.0"
 authors = [{ name = "FlashInfer team" }]
 license = "Apache-2.0"
 readme = "README.md"

0 commit comments

Comments (0)