Commit

fix amd build
Signed-off-by: Sage Moore <[email protected]>
SageMoore committed Dec 19, 2024
1 parent 8514b0e commit ec1290a
Showing 1 changed file with 2 additions and 2 deletions.
csrc/quantization/activation_kernels.cu: 2 additions & 2 deletions

@@ -20,7 +20,7 @@ __device__ __forceinline__ FP8_TYPE
 scaled_fp8_conversion(float const val, float const inverted_scale) {
   float x = val * inverted_scale;
   float r = fmax(-FP8_E4M3_MAX, fmin(x, FP8_E4M3_MAX));
-  return static_cast<c10::Float8_e4m3fn>(r);
+  return static_cast<FP8_TYPE>(r);
 }

 // Activation and gating kernel template.

@@ -117,4 +117,4 @@ void silu_and_mul_quant(torch::Tensor& out,  // [..., d]
               input.dtype() == torch::kBFloat16);
   TORCH_CHECK(input.size(-1) % 2 == 0);
   LAUNCH_ACTIVATION_GATE_KERNEL(vllm::silu_kernel);
-}
+}
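Why the one-line cast change fixes the AMD build: the function's declared return type is FP8_TYPE (visible in the hunk header), and on ROCm that alias does not resolve to c10::Float8_e4m3fn, so hardcoding the CUDA-only type broke compilation there. Below is a minimal, hypothetical sketch of how such a platform-selected alias typically looks; the names, headers, and constants are illustrative assumptions, not vLLM's exact definitions.

// Illustrative sketch only: a platform-selected FP8 alias so one kernel source
// builds under both CUDA and ROCm. Not vLLM's exact code.
#ifndef USE_ROCM
  #include <c10/util/Float8_e4m3fn.h>
  // NVIDIA: OCP e4m3 ("e4m3fn"); max finite value is 448.
  using FP8_TYPE = c10::Float8_e4m3fn;
  constexpr float FP8_E4M3_MAX = 448.0f;
#else
  #include <c10/util/Float8_e4m3fnuz.h>
  // AMD: the e4m3fnuz variant is a distinct C++ type with a different range.
  using FP8_TYPE = c10::Float8_e4m3fnuz;
  constexpr float FP8_E4M3_MAX = 240.0f;
#endif

// With the alias, the clamp-and-cast helper compiles unchanged on both
// toolchains. A c10::Float8_e4m3fn value does not implicitly convert to the
// ROCm e4m3fnuz type, so returning static_cast<c10::Float8_e4m3fn>(r) from a
// function declared to return FP8_TYPE cannot compile under ROCm.
__device__ __forceinline__ FP8_TYPE
scaled_fp8_conversion(float const val, float const inverted_scale) {
  float x = val * inverted_scale;
  float r = fmax(-FP8_E4M3_MAX, fmin(x, FP8_E4M3_MAX));
  return static_cast<FP8_TYPE>(r);
}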
