Formatting
gshtras committed Nov 11, 2024
1 parent 409a439 commit 3700cc9
Showing 4 changed files with 7 additions and 6 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/scripts/build.sh
@@ -12,7 +12,7 @@ export MAX_JOBS=1
# Make sure release wheels are built for the following architectures
export PYTORCH_ROCM_ARCH="gfx90a;gfx942"

-rm -f $(which sccache)
+rm -f "$(which sccache)"

export MAX_JOBS=32

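A note on the build.sh change above (my reading; the commit message itself only says "Formatting"): quoting the command substitution as "$(which sccache)" keeps the resolved path from being word-split or glob-expanded by the shell, which is the fix ShellCheck recommends for its SC2046 warning. When which returns a single simple path, the behavior is identical.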
6 changes: 3 additions & 3 deletions vllm/_custom_ops.py
@@ -1,7 +1,7 @@
import contextlib
import functools
import importlib
-from typing import TYPE_CHECKING, List, Optional, Tuple, Type, Union
+from typing import TYPE_CHECKING, List, Optional, Tuple, Union

import torch
import torch.library
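
The Type name is dropped from the typing import here, presumably because nothing in the module references typing.Type anymore, so this just clears an unused-import lint warning. The other imported names are untouched.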
@@ -242,8 +242,8 @@ def scaled_rms_norm(out: torch.Tensor, input: torch.Tensor,
def scaled_fused_add_rms_norm(out: torch.Tensor, input: torch.Tensor,
                              residual: torch.Tensor, weight: torch.Tensor,
                              scale: torch.Tensor, epsilon: float) -> None:
-    torch.ops._C.fused_add_rms_norm_static_fp8_quant(out, input, residual, weight, scale,
-                                                     epsilon)
+    torch.ops._C.fused_add_rms_norm_static_fp8_quant(out, input, residual,
+                                                     weight, scale, epsilon)


def advance_step_flashattn(num_seqs: int, num_queries: int, block_size: int,
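For context, and not part of this commit: only the line wrapping of the wrapper changed above. As a rough sketch of what the underlying fused_add_rms_norm_static_fp8_quant kernel is generally understood to compute in vLLM (residual add, then RMSNorm, then static per-tensor FP8 quantization), here is an unfused PyTorch reference; the _ref helper name, the sample shapes, and the 0.5 scale are invented for illustration.

import torch

def fused_add_rms_norm_static_fp8_quant_ref(out, x, residual, weight, scale,
                                            epsilon):
    # Residual add happens in place, as in the fused kernel.
    residual += x
    # RMSNorm over the last dimension, computed in fp32 for stability.
    var = residual.float().pow(2).mean(dim=-1, keepdim=True)
    normed = residual.float() * torch.rsqrt(var + epsilon) * weight.float()
    # Static FP8 quantization: divide by the per-tensor scale and clamp.
    fp8_max = torch.finfo(out.dtype).max
    out.copy_((normed / scale).clamp(-fp8_max, fp8_max).to(out.dtype))

x = torch.randn(2, 8, dtype=torch.float16)
residual = torch.randn(2, 8, dtype=torch.float16)
weight = torch.ones(8, dtype=torch.float16)
out = torch.empty(2, 8, dtype=torch.float8_e4m3fn)
fused_add_rms_norm_static_fp8_quant_ref(out, x, residual, weight,
                                        torch.tensor(0.5), 1e-6)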
1 change: 1 addition & 0 deletions vllm/attention/backends/hpu_attn.py
@@ -141,6 +141,7 @@ def forward(
        k_scale: float = 1.0,
        v_scale: float = 1.0,
        attn_type: AttentionType = AttentionType.DECODER,
+        fp8_out_scale: Optional[torch.Tensor] = None,
    ) -> torch.Tensor:
        """Forward pass with xFormers and PagedAttention.
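My reading of the hpu_attn.py change, not stated in the commit: the new fp8_out_scale: Optional[torch.Tensor] = None parameter looks like signature plumbing so that the HPU backend's forward() accepts the same fp8_out_scale argument the ROCm attention paths in this fork pass around; with the None default, the existing HPU behavior is unchanged.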
4 changes: 2 additions & 2 deletions vllm/utils.py
@@ -158,7 +158,7 @@ class _Sentinel:
ALL_PINNED_SENTINEL = _Sentinel()


-class rpd_trace():
+class rpd_trace:

    def __init__(self,
                 filename=None,
@@ -244,7 +244,7 @@ def is_hipScopedMarker_available():
    return hipScopedMarker is not None


-class rpd_mark():
+class rpd_mark:

    def __init__(self, name=None):
        self.name = name
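For anyone unsure whether dropping the empty parentheses in the two class statements changes semantics: it does not. In Python 3, a bare class statement and one with empty parentheses both create a class that implicitly inherits from object, so this is purely a style cleanup. A tiny sketch (class names invented here):

class WithParens():
    pass

class WithoutParens:
    pass

# Both implicitly inherit from object; only the spelling differs.
assert WithParens.__bases__ == (object,)
assert WithoutParens.__bases__ == (object,)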
