NFC: Fix minor lint
Signed-off-by: Pavani Majety <[email protected]>
pavanimajety committed Nov 19, 2024
1 parent ec4a460 commit 8d52d41
Showing 1 changed file with 3 additions and 7 deletions.
10 changes: 3 additions & 7 deletions vllm/attention/backends/flashinfer.py
@@ -757,10 +757,8 @@ def __init__(
         if alibi_slopes is not None:
             alibi_slopes = torch.tensor(alibi_slopes, dtype=torch.float32)
         self.alibi_slopes = alibi_slopes
-        if sliding_window is None:
-            self.sliding_window = (-1, -1)
-        else:
-            self.sliding_window = (sliding_window - 1, 0)
+        self.sliding_window = ((sliding_window - 1,
+                                0) if sliding_window is not None else (-1, -1))
         self.kv_cache_dtype = kv_cache_dtype
         self.logits_soft_cap = logits_soft_cap

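For context: both forms compute the same (left, right) sliding-window tuple, with (-1, -1) as the "no sliding window" sentinel, so the change is purely stylistic (NFC). A minimal standalone sketch, using hypothetical helper names rather than vLLM code, that checks the two forms agree:

    from typing import Optional, Tuple

    def sliding_window_before(sliding_window: Optional[int]) -> Tuple[int, int]:
        # The original if/else form.
        if sliding_window is None:
            return (-1, -1)
        return (sliding_window - 1, 0)

    def sliding_window_after(sliding_window: Optional[int]) -> Tuple[int, int]:
        # The refactored conditional-expression form.
        return ((sliding_window - 1,
                 0) if sliding_window is not None else (-1, -1))

    for sw in (None, 1, 4096):
        assert sliding_window_before(sw) == sliding_window_after(sw)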
@@ -866,9 +864,7 @@ def unified_flash_infer(
     assert query.shape[0] == num_prefill_tokens
     assert decode_query.shape[0] == num_decode_tokens
 
-    window_left = -1
-    if window_size is not None:
-        window_left = window_size[0]
+    window_left = window_size[0] if window_size is not None else -1
 
     prefill_output: Optional[torch.Tensor] = None
     decode_output: Optional[torch.Tensor] = None
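The second hunk is the same transformation: window_left keeps its default of -1 (the usual "unbounded window" sentinel in FlashAttention-style APIs) when window_size is None. A quick doctest-style check with hypothetical values:

    >>> window_size = (4095, 0)
    >>> window_size[0] if window_size is not None else -1
    4095
    >>> window_size = None
    >>> window_size[0] if window_size is not None else -1
    -1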
