TestLLM / litellm /llms /vllm /completion /transformation.py
Raju2024's picture
Upload 1072 files
e3278e4 verified
raw
history blame contribute delete
352 Bytes
"""
Translates from OpenAI's `/v1/chat/completions` format to the vLLM SDK's `llm.generate` call.
NOT RECOMMENDED FOR PRODUCTION USE. Use the `hosted_vllm/` provider instead.
"""
from ...hosted_vllm.chat.transformation import HostedVLLMChatConfig
class VLLMConfig(HostedVLLMChatConfig):
    """Config for the local vLLM SDK provider.

    Marker subclass: the vLLM SDK accepts the same OpenAI-compatible
    parameters as the hosted_vllm provider, so all parameter-mapping
    behavior is inherited unchanged from `HostedVLLMChatConfig`.
    """