-
-
Notifications
You must be signed in to change notification settings - Fork 5k
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Signed-off-by: wangxiyuan <[email protected]>
- Loading branch information
1 parent
48edab8
commit 80f9986
Showing
11 changed files
with
330 additions
and
143 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,9 @@ | ||
from setuptools import setup | ||
|
||
# Register this package as a vLLM general plugin: vLLM discovers the
# `vllm.general_plugins` entry-point group at startup and calls `register()`.
# Fix: the entry-point key previously said `register_dummy_model` (copy-paste
# from the dummy-model plugin), but this plugin registers a *platform*.
setup(name='vllm_add_dummy_platform',
      version='0.1',
      packages=['vllm_add_dummy_platform'],
      entry_points={
          'vllm.general_plugins':
          ["register_dummy_platform = vllm_add_dummy_platform:register"]
      })
9 changes: 9 additions & 0 deletions
9
tests/plugins/vllm_add_dummy_platform/vllm_add_dummy_platform/__init__.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,9 @@ | ||
from vllm import PlatformRegistry | ||
|
||
|
||
def register():
    """Plugin entry point invoked by vLLM's plugin loader.

    Registers the dummy platform under the name ``my_platform`` and then
    makes it the active platform.
    """
    registry = PlatformRegistry
    registry.register_platform(
        "my_platform", "vllm_add_dummy_platform.my_platform:DummyPlatform")
    registry.set_current_platform("my_platform")
13 changes: 13 additions & 0 deletions
13
tests/plugins/vllm_add_dummy_platform/vllm_add_dummy_platform/my_attention.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,13 @@ | ||
class DummyAttentionImpl:
    """Minimal attention implementation stub used by the platform-plugin
    tests."""

    def forward(self):
        """No-op forward pass; always returns ``None``."""
        return None


class DummyAttentionBackend:
    """Attention backend stub that only knows which implementation class to
    hand out."""

    def __init__(self):
        """The dummy backend carries no state."""
        pass

    def get_impl_cls(self):
        """Return the implementation *class* (not an instance)."""
        return DummyAttentionImpl
7 changes: 7 additions & 0 deletions
7
tests/plugins/vllm_add_dummy_platform/vllm_add_dummy_platform/my_model_runner.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,7 @@ | ||
# Fix: this module lives inside the `vllm_add_dummy_platform` package, so the
# sibling module must be imported relatively. A bare `from my_attention
# import ...` only resolves if the package directory itself happens to be on
# sys.path, which is not the case once the plugin is pip-installed.
from .my_attention import DummyAttentionBackend


class DummyModelRunner:
    """Model-runner stub that wires in the dummy attention backend."""

    def __init__(self):
        # The only state the dummy runner carries; tests inspect this
        # attribute to confirm the plugin's backend was picked up.
        self.attn_backend = DummyAttentionBackend()
19 changes: 19 additions & 0 deletions
19
tests/plugins/vllm_add_dummy_platform/vllm_add_dummy_platform/my_platform.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,19 @@ | ||
from vllm.config import VllmConfig
from vllm.platforms import Platform


class DummyPlatform(Platform):
    """Out-of-tree platform stub registered by the plugin tests."""

    # Human-readable device name reported by this platform.
    device_name = "dummy"

    def __init__(self):
        super().__init__()

    @classmethod
    def get_device_name(cls) -> str:
        """Return the device name for this platform."""
        return "dummy"

    @classmethod
    def check_and_update_config(cls, vllm_config: VllmConfig) -> None:
        """Point the engine at this plugin's worker implementation."""
        parallel_config = vllm_config.parallel_config
        parallel_config.worker_cls = (
            "vllm_add_dummy_platform.my_worker.DummyWorker")
14 changes: 14 additions & 0 deletions
14
tests/plugins/vllm_add_dummy_platform/vllm_add_dummy_platform/my_worker.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,14 @@ | ||
from typing import List

# Fix: relative import — a bare `from my_model_runner import ...` only works
# when the package directory itself is on sys.path.
from .my_model_runner import DummyModelRunner


class DummyCacheEngine:
    """Placeholder cache engine; exists only so DummyWorker can reference a
    cache-engine type."""
    pass


class DummyWorker:
    """Worker stub exposing the attributes vLLM expects on a worker."""

    def __init__(self):
        # Fix: the original assigned the typing alias `List[DummyCacheEngine]`
        # itself to the attribute instead of annotating an (empty) list of
        # cache engines.
        self.cache_engine: List[DummyCacheEngine] = []
        self.model_runner = DummyModelRunner()
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,123 +1,57 @@ | ||
from .interface import _Backend # noqa: F401 | ||
from .interface import CpuArchEnum, Platform, PlatformEnum, UnspecifiedPlatform | ||
from .registry import PlatformRegistry, detect_current_platform | ||
|
||
current_platform: Platform | ||
_current_platform: Platform = UnspecifiedPlatform() | ||
|
||
# NOTE: we don't use `torch.version.cuda` / `torch.version.hip` because | ||
# they only indicate the build configuration, not the runtime environment. | ||
# For example, people can install a cuda build of pytorch but run on tpu. | ||
|
||
is_tpu = False | ||
try: | ||
# While it's technically possible to install libtpu on a non-TPU machine, | ||
# this is a very uncommon scenario. Therefore, we assume that libtpu is | ||
# installed if and only if the machine has TPUs. | ||
import libtpu # noqa: F401 | ||
is_tpu = True | ||
except Exception: | ||
pass | ||
def initialize_current_platform():
    """Initialize the current platform. This function is called when loading
    the vllm plugin."""
    global _current_platform
    # A platform explicitly registered (e.g. by an out-of-tree plugin) takes
    # precedence; otherwise fall back to environment detection.
    registered = PlatformRegistry.current_platform
    if registered is None:
        _current_platform = detect_current_platform()
    else:
        _current_platform = PlatformRegistry.get_current_platform_cls()
|
||
is_cuda = False | ||
|
||
try: | ||
import pynvml | ||
pynvml.nvmlInit() | ||
try: | ||
if pynvml.nvmlDeviceGetCount() > 0: | ||
is_cuda = True | ||
finally: | ||
pynvml.nvmlShutdown() | ||
except Exception: | ||
# CUDA is supported on Jetson, but NVML may not be. | ||
import os | ||
def update_current_platform(device_name: str):
    """Manually switch the active platform to *device_name*.

    Records the choice in the registry, then rebinds the module-level
    ``_current_platform`` so `CurrentPlatform` wrappers pick it up.
    """
    global _current_platform
    PlatformRegistry.set_current_platform(device_name)
    _current_platform = PlatformRegistry.get_current_platform_cls()
|
||
def cuda_is_jetson() -> bool: | ||
return os.path.isfile("/etc/nv_tegra_release") \ | ||
or os.path.exists("/sys/class/tegra-firmware") | ||
|
||
if cuda_is_jetson(): | ||
is_cuda = True | ||
class CurrentPlatform: | ||
"""A wrapper that provides an interface to the current platform. | ||
`current_platform` is imported to many modules once vLLM is imported. | ||
Updating `current_platform` value directly will not work in those modules. | ||
So it needs the wrapper here to provide a dynamic platform loading | ||
mechanism. | ||
is_rocm = False | ||
This class can make sure that the `current_platform` is always up-to-date. | ||
""" | ||
|
||
try: | ||
import amdsmi | ||
amdsmi.amdsmi_init() | ||
try: | ||
if len(amdsmi.amdsmi_get_processor_handles()) > 0: | ||
is_rocm = True | ||
finally: | ||
amdsmi.amdsmi_shut_down() | ||
except Exception: | ||
pass | ||
def __init__(self):
    # Cache the platform that is current at construction time;
    # _refresh_current_platform() re-syncs it if the global is rebound later.
    self.platform = _current_platform
|
||
is_hpu = False | ||
try: | ||
from importlib import util | ||
is_hpu = util.find_spec('habana_frameworks') is not None | ||
except Exception: | ||
pass | ||
def _refresh_current_platform(self):
    """Refresh the current platform dynamically."""
    global _current_platform
    # The module-level global may have been rebound (e.g. by
    # update_current_platform) since this wrapper cached its value.
    if _current_platform is not self.platform:
        self.platform = _current_platform
|
||
is_xpu = False | ||
def __getattr__(self, name):
    """Delegate attribute lookup to the wrapped current platform."""
    # Re-sync first so callers always see the latest platform object.
    self._refresh_current_platform()
    return getattr(self.platform, name)
|
||
try: | ||
# installed IPEX if the machine has XPUs. | ||
import intel_extension_for_pytorch # noqa: F401 | ||
import oneccl_bindings_for_pytorch # noqa: F401 | ||
import torch | ||
if hasattr(torch, 'xpu') and torch.xpu.is_available(): | ||
is_xpu = True | ||
except Exception: | ||
pass | ||
|
||
is_cpu = False | ||
try: | ||
from importlib.metadata import version | ||
is_cpu = "cpu" in version("vllm") | ||
except Exception: | ||
pass | ||
|
||
is_neuron = False | ||
try: | ||
import transformers_neuronx # noqa: F401 | ||
is_neuron = True | ||
except ImportError: | ||
pass | ||
|
||
is_openvino = False | ||
try: | ||
from importlib.metadata import version | ||
is_openvino = "openvino" in version("vllm") | ||
except Exception: | ||
pass | ||
|
||
if is_tpu: | ||
# people might install pytorch built with cuda but run on tpu | ||
# so we need to check tpu first | ||
from .tpu import TpuPlatform | ||
current_platform = TpuPlatform() | ||
elif is_cuda: | ||
from .cuda import CudaPlatform | ||
current_platform = CudaPlatform() | ||
elif is_rocm: | ||
from .rocm import RocmPlatform | ||
current_platform = RocmPlatform() | ||
elif is_hpu: | ||
from .hpu import HpuPlatform | ||
current_platform = HpuPlatform() | ||
elif is_xpu: | ||
from .xpu import XPUPlatform | ||
current_platform = XPUPlatform() | ||
elif is_cpu: | ||
from .cpu import CpuPlatform | ||
current_platform = CpuPlatform() | ||
elif is_neuron: | ||
from .neuron import NeuronPlatform | ||
current_platform = NeuronPlatform() | ||
elif is_openvino: | ||
from .openvino import OpenVinoPlatform | ||
current_platform = OpenVinoPlatform() | ||
else: | ||
current_platform = UnspecifiedPlatform() | ||
# The global variable for other modules to use. | ||
current_platform: CurrentPlatform = CurrentPlatform() | ||
|
||
__all__ = ['Platform', 'PlatformEnum', 'current_platform', 'CpuArchEnum'] |
Oops, something went wrong.