Mirror of https://github.com/vllm-project/vllm.git (synced 2025-12-06 15:04:47 +08:00)
[Misc] Rename CohereForAI references to CohereLabs (#30147)
Signed-off-by: Russell Bryant <rbryant@redhat.com>
@@ -666,7 +666,7 @@ These models primarily accept the [`LLM.generate`](./generative_models.md#llmgen
 | Architecture | Models | Inputs | Example HF Models | [LoRA](../features/lora.md) | [PP](../serving/parallelism_scaling.md) |
 |--------------|--------|--------|-------------------|----------------------|---------------------------|
 | `AriaForConditionalGeneration` | Aria | T + I<sup>+</sup> | `rhymes-ai/Aria` | | |
-| `AyaVisionForConditionalGeneration` | Aya Vision | T + I<sup>+</sup> | `CohereForAI/aya-vision-8b`, `CohereForAI/aya-vision-32b`, etc. | | ✅︎ |
+| `AyaVisionForConditionalGeneration` | Aya Vision | T + I<sup>+</sup> | `CohereLabs/aya-vision-8b`, `CohereLabs/aya-vision-32b`, etc. | | ✅︎ |
 | `BeeForConditionalGeneration` | Bee-8B | T + I<sup>E+</sup> | `Open-Bee/Bee-8B-RL`, `Open-Bee/Bee-8B-SFT` | | ✅︎ |
 | `Blip2ForConditionalGeneration` | BLIP-2 | T + I<sup>E</sup> | `Salesforce/blip2-opt-2.7b`, `Salesforce/blip2-opt-6.7b`, etc. | | ✅︎ |
 | `ChameleonForConditionalGeneration` | Chameleon | T + I | `facebook/chameleon-7b`, etc. | | ✅︎ |
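For reference alongside the table above, a minimal offline-inference sketch that exercises the renamed `CohereLabs/aya-vision-8b` checkpoint. The turn-token wrapping mirrors the test prompt formatter further down in this diff; the `<image>` placeholder, the image asset, and the sampling settings are illustrative assumptions, not part of this commit.

```python
# Minimal sketch (assumed prompt/image/params): run the renamed Aya Vision
# checkpoint through vLLM's offline LLM.generate API.
from vllm import LLM, SamplingParams
from vllm.assets.image import ImageAsset

llm = LLM(model="CohereLabs/aya-vision-8b")  # new org name from this commit

question = "What is shown in this image?"
# Turn-token wrapping taken from the prompt_formatter used in the VLM tests;
# the "<image>" placeholder is an assumption about the processor's format.
prompt = (
    "<|START_OF_TURN_TOKEN|><|USER_TOKEN|>"
    f"<image>{question}"
    "<|END_OF_TURN_TOKEN|><|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>"
)

outputs = llm.generate(
    {
        "prompt": prompt,
        "multi_modal_data": {"image": ImageAsset("stop_sign").pil_image},
    },
    SamplingParams(max_tokens=64),
)
print(outputs[0].outputs[0].text)
```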
@@ -72,7 +72,7 @@ def run_aria(questions: list[str], modality: str) -> ModelRequestData:
 # Aya Vision
 def run_aya_vision(questions: list[str], modality: str) -> ModelRequestData:
     assert modality == "image"
-    model_name = "CohereForAI/aya-vision-8b"
+    model_name = "CohereLabs/aya-vision-8b"

     engine_args = EngineArgs(
         model=model_name,
@@ -76,7 +76,7 @@ def load_aria(question: str, image_urls: list[str]) -> ModelRequestData:


 def load_aya_vision(question: str, image_urls: list[str]) -> ModelRequestData:
-    model_name = "CohereForAI/aya-vision-8b"
+    model_name = "CohereLabs/aya-vision-8b"

     engine_args = EngineArgs(
         model=model_name,
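The two example hunks above only show the lines surrounding the rename. As a rough sketch of how such an `EngineArgs` is typically turned into an engine in vLLM's offline examples, with the extra limits below being illustrative assumptions rather than values taken from this diff:

```python
# Hedged sketch: build an LLM from EngineArgs carrying the renamed checkpoint.
from dataclasses import asdict

from vllm import LLM, EngineArgs

engine_args = EngineArgs(
    model="CohereLabs/aya-vision-8b",   # new org name from this commit
    max_model_len=4096,                 # assumed limit, not from the diff
    limit_mm_per_prompt={"image": 2},   # assumed; relevant for the multi-image helper
)

# EngineArgs is a dataclass, so its fields can be splatted into the LLM constructor.
llm = LLM(**asdict(engine_args))
```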
@@ -109,7 +109,7 @@ TEXT_GENERATION_MODELS = {
     "baichuan-inc/Baichuan2-13B-Chat": PPTestSettings.fast(),
     "bigscience/bloomz-1b1": PPTestSettings.fast(),
     "zai-org/chatglm3-6b": PPTestSettings.fast(),
-    "CohereForAI/c4ai-command-r-v01": PPTestSettings.fast(load_format="dummy"),
+    "CohereLabs/c4ai-command-r-v01": PPTestSettings.fast(load_format="dummy"),
     "databricks/dbrx-instruct": PPTestSettings.fast(load_format="dummy"),
     "Deci/DeciLM-7B-instruct": PPTestSettings.fast(),
     "deepseek-ai/deepseek-llm-7b-chat": PPTestSettings.fast(),
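The pipeline-parallel test entry above exercises the renamed Command R checkpoint with a dummy load format. A hedged sketch of the equivalent standalone usage follows; the parallel size is illustrative and assumes two visible GPUs, and `load_format="dummy"` skips downloading real weights, as in the test settings.

```python
# Hedged sketch: offline engine for the renamed Command R checkpoint with
# pipeline parallelism, mirroring the PPTestSettings entry above.
from vllm import LLM

llm = LLM(
    model="CohereLabs/c4ai-command-r-v01",
    pipeline_parallel_size=2,   # assumed: split layers across two GPUs
    load_format="dummy",        # random weights, as in the test settings
)
```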
@@ -278,7 +278,7 @@ VLM_TEST_SETTINGS = {
         marks=[large_gpu_mark(min_gb=64)],
     ),
     "aya_vision": VLMTestInfo(
-        models=["CohereForAI/aya-vision-8b"],
+        models=["CohereLabs/aya-vision-8b"],
         test_type=(VLMTestType.IMAGE),
         prompt_formatter=lambda img_prompt: f"<|START_OF_TURN_TOKEN|><|USER_TOKEN|>{img_prompt}<|END_OF_TURN_TOKEN|><|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>", # noqa: E501
         single_image_prompts=IMAGE_ASSETS.prompts(
@@ -294,7 +294,7 @@ VLM_TEST_SETTINGS = {
         vllm_runner_kwargs={"mm_processor_kwargs": {"crop_to_patches": True}},
     ),
     "aya_vision-multi_image": VLMTestInfo(
-        models=["CohereForAI/aya-vision-8b"],
+        models=["CohereLabs/aya-vision-8b"],
         test_type=(VLMTestType.MULTI_IMAGE),
         prompt_formatter=lambda img_prompt: f"<|START_OF_TURN_TOKEN|><|USER_TOKEN|>{img_prompt}<|END_OF_TURN_TOKEN|><|START_OF_TURN_TOKEN|><|CHATBOT_TOKEN|>", # noqa: E501
         single_image_prompts=IMAGE_ASSETS.prompts(
@@ -211,10 +211,10 @@ _TEXT_GENERATION_EXAMPLE_MODELS = {
         trust_remote_code=True,
     ),
     "CohereForCausalLM": _HfExamplesInfo(
-        "CohereForAI/c4ai-command-r-v01", trust_remote_code=True
+        "CohereLabs/c4ai-command-r-v01", trust_remote_code=True
     ),
     "Cohere2ForCausalLM": _HfExamplesInfo(
-        "CohereForAI/c4ai-command-r7b-12-2024",
+        "CohereLabs/c4ai-command-r7b-12-2024",
         trust_remote_code=True,
     ),
     "CwmForCausalLM": _HfExamplesInfo("facebook/cwm", min_transformers_version="4.58"),
@@ -581,7 +581,7 @@ _AUTOMATIC_CONVERTED_MODELS = {
 _MULTIMODAL_EXAMPLE_MODELS = {
     # [Decoder-only]
     "AriaForConditionalGeneration": _HfExamplesInfo("rhymes-ai/Aria"),
-    "AyaVisionForConditionalGeneration": _HfExamplesInfo("CohereForAI/aya-vision-8b"),
+    "AyaVisionForConditionalGeneration": _HfExamplesInfo("CohereLabs/aya-vision-8b"),
     "BeeForConditionalGeneration": _HfExamplesInfo(
         "Open-Bee/Bee-8B-RL",
         trust_remote_code=True,
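Since this commit is a mechanical rename across docs, examples, and tests, a small helper like the following can confirm no stale references remain in a checkout. It is a hypothetical utility written for illustration; the file filters are assumptions, not part of the commit.

```python
# Hypothetical helper: report any leftover occurrences of the old org name.
from pathlib import Path

OLD_ORG = "CohereForAI"


def find_stale_references(root: str = ".") -> list[str]:
    """Return 'path:line: text' entries that still mention the old org name."""
    hits: list[str] = []
    for path in Path(root).rglob("*"):
        # Assumed filter: only scan text-like sources, skip VCS metadata.
        if not path.is_file() or path.suffix not in {".py", ".md"} or ".git" in path.parts:
            continue
        text = path.read_text(encoding="utf-8", errors="ignore")
        for lineno, line in enumerate(text.splitlines(), start=1):
            if OLD_ORG in line:
                hits.append(f"{path}:{lineno}: {line.strip()}")
    return hits


if __name__ == "__main__":
    stale = find_stale_references()
    print("\n".join(stale) if stale else f"no {OLD_ORG} references found")
```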