diff --git a/ui/litellm-dashboard/src/components/playground/compareUI/CompareUI.test.tsx b/ui/litellm-dashboard/src/components/playground/compareUI/CompareUI.test.tsx
index 1941943a58..963b752774 100644
--- a/ui/litellm-dashboard/src/components/playground/compareUI/CompareUI.test.tsx
+++ b/ui/litellm-dashboard/src/components/playground/compareUI/CompareUI.test.tsx
@@ -2,6 +2,7 @@ import { render, waitFor } from "@testing-library/react";
import userEvent from "@testing-library/user-event";
import { beforeEach, describe, expect, it, vi } from "vitest";
import CompareUI from "./CompareUI";
+import { makeOpenAIChatCompletionRequest } from "../llm_calls/chat_completion";
vi.mock("../llm_calls/fetch_models", () => ({
fetchAvailableModels: vi.fn().mockResolvedValue([{ model_group: "gpt-4" }, { model_group: "gpt-3.5-turbo" }]),
@@ -11,6 +12,34 @@ vi.mock("../llm_calls/chat_completion", () => ({
makeOpenAIChatCompletionRequest: vi.fn().mockResolvedValue(undefined),
}));
+let capturedOnImageUpload: ((file: File) => false) | null = null;
+
+vi.mock("../chat_ui/ChatImageUpload", () => ({
+ default: ({ onImageUpload }: { onImageUpload: (file: File) => false }) => {
+ capturedOnImageUpload = onImageUpload;
+    return <div data-testid="chat-image-upload" />;
+  },
+}));
@@ -22,8 +51,9 @@ vi.mock("./components/ComparisonPanel", () => ({
}));
vi.mock("./components/MessageInput", () => ({
-  MessageInput: ({ value, onChange, onSend, disabled }: any) => (
+  MessageInput: ({ value, onChange, onSend, disabled, hasAttachment, uploadComponent }: any) => (
     <div>
       <textarea data-testid="message-textarea" value={value} onChange={(e) => onChange(e.target.value)} />
       <button data-testid="send-button" onClick={onSend} disabled={disabled}>Send</button>
+      {hasAttachment && <span data-testid="has-attachment">attachment</span>}
+      {uploadComponent && <div>{uploadComponent}</div>}
     </div>
),
}));
@@ -51,6 +82,10 @@ beforeEach(() => {
dispatchEvent: () => false,
}),
});
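+  // jsdom does not implement object URLs, so stub them for the attachment preview flow.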
+ global.URL.createObjectURL = vi.fn().mockReturnValue("blob:test-url");
+ global.URL.revokeObjectURL = vi.fn();
+ capturedOnImageUpload = null;
+ vi.clearAllMocks();
});
describe("CompareUI", () => {
@@ -88,4 +123,36 @@ describe("CompareUI", () => {
expect(getByTestId("comparison-panel-1")).toBeInTheDocument();
expect(getByTestId("comparison-panel-2")).toBeInTheDocument();
});
+
+ it("should handle image upload and send message with attachment", async () => {
+ const user = userEvent.setup();
+ const { getByTestId, queryByTestId } = render(
+      <CompareUI accessToken="test-token" disabledPersonalKeyCreation={false} />,
+ );
+
+ const file = new File(["test content"], "test-image.png", { type: "image/png" });
+
+ await waitFor(() => {
+ expect(capturedOnImageUpload).not.toBeNull();
+ });
+
+ if (capturedOnImageUpload) {
+ capturedOnImageUpload(file);
+ }
+
+ await waitFor(() => {
+ expect(queryByTestId("has-attachment")).toBeInTheDocument();
+ });
+
+ const textarea = getByTestId("message-textarea");
+ await user.type(textarea, "Describe this image");
+
+ const sendButton = getByTestId("send-button");
+ expect(sendButton).not.toBeDisabled();
+ await user.click(sendButton);
+
+ await waitFor(() => {
+ expect(makeOpenAIChatCompletionRequest).toHaveBeenCalled();
+ });
+ });
});
diff --git a/ui/litellm-dashboard/src/components/playground/compareUI/CompareUI.tsx b/ui/litellm-dashboard/src/components/playground/compareUI/CompareUI.tsx
index ef4d65d409..9ba0953505 100644
--- a/ui/litellm-dashboard/src/components/playground/compareUI/CompareUI.tsx
+++ b/ui/litellm-dashboard/src/components/playground/compareUI/CompareUI.tsx
@@ -1,14 +1,16 @@
"use client";
-import React, { useEffect, useMemo, useState } from "react";
-import { v4 as uuidv4 } from "uuid";
-import { Select, Input, Tooltip, Button } from "antd";
-import { ClearOutlined, PlusOutlined } from "@ant-design/icons";
import NotificationsManager from "@/components/molecules/notifications_manager";
-import { fetchAvailableModels } from "../llm_calls/fetch_models";
-import { makeOpenAIChatCompletionRequest } from "../llm_calls/chat_completion";
+import { ClearOutlined, DeleteOutlined, FilePdfOutlined, PlusOutlined } from "@ant-design/icons";
+import { Button, Input, Select, Tooltip } from "antd";
+import { useEffect, useMemo, useState } from "react";
+import { v4 as uuidv4 } from "uuid";
+import ChatImageUpload from "../chat_ui/ChatImageUpload";
+import { createChatDisplayMessage, createChatMultimodalMessage } from "../chat_ui/ChatImageUtils";
import type { TokenUsage } from "../chat_ui/ResponseMetrics";
import type { MessageType, VectorStoreSearchResponse } from "../chat_ui/types";
+import { makeOpenAIChatCompletionRequest } from "../llm_calls/chat_completion";
+import { fetchAvailableModels } from "../llm_calls/fetch_models";
import { ComparisonPanel } from "./components/ComparisonPanel";
import { MessageInput } from "./components/MessageInput";
export interface ComparisonInstance {
@@ -71,6 +73,8 @@ export default function CompareUI({ accessToken, disabledPersonalKeyCreation }:
  const [modelOptions, setModelOptions] = useState<{ model_group: string }[]>([]);
const [isLoadingModels, setIsLoadingModels] = useState(false);
const [inputValue, setInputValue] = useState("");
+  const [uploadedFile, setUploadedFile] = useState<File | null>(null);
+  const [uploadedFilePreviewUrl, setUploadedFilePreviewUrl] = useState<string | null>(null);
const [apiKeySource, setApiKeySource] = useState<"session" | "custom">(
disabledPersonalKeyCreation ? "custom" : "session",
);
@@ -82,6 +86,13 @@ export default function CompareUI({ accessToken, disabledPersonalKeyCreation }:
}, 300);
return () => clearTimeout(timer);
}, [customApiKey]);
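+  // Revoke the previous blob preview URL whenever it changes (and on unmount) so object URLs are not leaked.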
+ useEffect(() => {
+ return () => {
+ if (uploadedFilePreviewUrl) {
+ URL.revokeObjectURL(uploadedFilePreviewUrl);
+ }
+ };
+ }, [uploadedFilePreviewUrl]);
const effectiveApiKey = useMemo(
() => (apiKeySource === "session" ? accessToken || "" : debouncedCustomApiKey.trim()),
[apiKeySource, accessToken, debouncedCustomApiKey],
@@ -215,6 +226,21 @@ export default function CompareUI({ accessToken, disabledPersonalKeyCreation }:
);
});
};
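+  // Keep the picked file in state and build a blob URL for the inline preview; returning false signals the picker not to auto-upload.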
+ const handleFileUpload = (file: File): false => {
+ if (uploadedFilePreviewUrl) {
+ URL.revokeObjectURL(uploadedFilePreviewUrl);
+ }
+ setUploadedFile(file);
+ setUploadedFilePreviewUrl(URL.createObjectURL(file));
+ return false;
+ };
+ const handleRemoveFile = () => {
+ if (uploadedFilePreviewUrl) {
+ URL.revokeObjectURL(uploadedFilePreviewUrl);
+ }
+ setUploadedFile(null);
+ setUploadedFilePreviewUrl(null);
+ };
const clearAllChats = () => {
setComparisons((prev) =>
prev.map((comparison) => ({
@@ -225,6 +251,7 @@ export default function CompareUI({ accessToken, disabledPersonalKeyCreation }:
})),
);
setInputValue("");
+ handleRemoveFile();
};
const appendAssistantChunk = (comparisonId: string, chunk: string, model?: string) => {
if (!chunk) {
@@ -389,9 +416,10 @@ export default function CompareUI({ accessToken, disabledPersonalKeyCreation }:
);
};
const canUseSessionKey = Boolean(accessToken);
- const handleSendMessage = (input: string) => {
+ const handleSendMessage = async (input: string) => {
const trimmed = input.trim();
- if (!trimmed) {
+ const hasAttachment = Boolean(uploadedFile);
+ if (!trimmed && !hasAttachment) {
return;
}
if (!effectiveApiKey) {
@@ -406,6 +434,17 @@ export default function CompareUI({ accessToken, disabledPersonalKeyCreation }:
NotificationsManager.fromBackend("Select a model before sending a message.");
return;
}
+
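+  // Build two shapes of the user turn: a (possibly multimodal) payload for the API and a lightweight message for display.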
+ const apiUserMessage = hasAttachment
+ ? await createChatMultimodalMessage(trimmed, uploadedFile as File)
+ : { role: "user", content: trimmed };
+ const displayUserMessage = createChatDisplayMessage(
+ trimmed,
+ hasAttachment,
+ uploadedFilePreviewUrl || undefined,
+ uploadedFile?.name,
+ );
+
const preparedTargets = new Map<
string,
{
@@ -417,15 +456,19 @@ export default function CompareUI({ accessToken, disabledPersonalKeyCreation }:
guardrails: string[];
temperature: number;
maxTokens: number;
- messages: MessageType[];
+ displayMessages: MessageType[];
+ apiChatHistory: Array<{ role: string; content: string | any[] }>;
}
>();
targetComparisons.forEach((comparison) => {
const traceId = comparison.traceId ?? uuidv4();
- const userMessage: MessageType = {
- role: "user",
- content: trimmed,
- };
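+      // Replay the panel's prior turns plus the new user message so each API call carries full context.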
+ const apiChatHistory = [
+ ...comparison.messages.map(({ role, content }) => ({
+ role,
+ content: Array.isArray(content) ? content : typeof content === "string" ? content : "",
+ })),
+ apiUserMessage,
+ ];
preparedTargets.set(comparison.id, {
id: comparison.id,
model: comparison.model,
@@ -435,7 +478,8 @@ export default function CompareUI({ accessToken, disabledPersonalKeyCreation }:
guardrails: comparison.guardrails,
temperature: comparison.temperature,
maxTokens: comparison.maxTokens,
- messages: [...comparison.messages, userMessage],
+ displayMessages: [...comparison.messages, displayUserMessage],
+ apiChatHistory,
});
});
if (preparedTargets.size === 0) {
@@ -450,23 +494,22 @@ export default function CompareUI({ accessToken, disabledPersonalKeyCreation }:
return {
...comparison,
traceId: prepared.traceId,
- messages: prepared.messages,
+ messages: prepared.displayMessages,
isLoading: true,
};
}),
);
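+  // Clear the composer right away; each panel's request below streams independently.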
+ setInputValue("");
+ handleRemoveFile();
+
preparedTargets.forEach((prepared) => {
- const apiChatHistory = prepared.messages.map(({ role, content }) => ({
- role,
- content: typeof content === "string" ? content : "",
- }));
const tags = prepared.tags.length > 0 ? prepared.tags : undefined;
const vectorStoreIds = prepared.vectorStores.length > 0 ? prepared.vectorStores : undefined;
const guardrails = prepared.guardrails.length > 0 ? prepared.guardrails : undefined;
const comparison = comparisons.find((c) => c.id === prepared.id);
const useAdvancedParams = comparison?.useAdvancedParams ?? false;
makeOpenAIChatCompletionRequest(
- apiChatHistory,
+ prepared.apiChatHistory,
(chunk, model) => appendAssistantChunk(prepared.id, chunk, model),
prepared.model,
effectiveApiKey,
@@ -536,18 +579,19 @@ export default function CompareUI({ accessToken, disabledPersonalKeyCreation }:
setInputValue(value);
};
const handleSubmit = () => {
- handleSendMessage(inputValue);
- setInputValue("");
+ void handleSendMessage(inputValue);
};
const handleFollowUpSelect = (question: string) => {
setInputValue(question);
};
const hasMessages = comparisons.some((comparison) => comparison.messages.length > 0);
const isAnyComparisonLoading = comparisons.some((comparison) => comparison.isLoading);
- const showSuggestedPrompts = !hasMessages && !isAnyComparisonLoading;
+ const hasAttachment = Boolean(uploadedFile);
+ const isUploadedFilePdf = Boolean(uploadedFile?.name.toLowerCase().endsWith(".pdf"));
+ const showSuggestedPrompts = !hasMessages && !isAnyComparisonLoading && !hasAttachment;
return (
-
+
@@ -620,7 +664,9 @@ export default function CompareUI({ accessToken, disabledPersonalKeyCreation }:
-          {showSuggestedPrompts ? (
+          {hasAttachment ? (
+            <div>Attachment ready to send</div>
+          ) : showSuggestedPrompts ? (
             {SUGGESTED_PROMPTS.map((prompt) => (
-          ) : haveAllResponses ? (
+          ) : haveAllResponses && !hasAttachment ? (
             {GENERIC_FOLLOW_UPS.map((question) => (
+          {uploadedFile && (
+            <div>
+              {isUploadedFilePdf ? (
+                <FilePdfOutlined />
+              ) : (
+                uploadedFilePreviewUrl && <img src={uploadedFilePreviewUrl} alt={uploadedFile.name} />
+              )}
+              <div>
+                <div>{uploadedFile.name}</div>
+                <div>{isUploadedFilePdf ? "PDF" : "Image"}</div>
+              </div>
+              <Button type="text" icon={<DeleteOutlined />} onClick={handleRemoveFile} />
+            </div>
+          )}
           <MessageInput
             value={inputValue}
             onChange={handleInputChange}
             onSend={handleSubmit}
             disabled={comparisons.some((comparison) => comparison.isLoading)}
+            hasAttachment={hasAttachment}
+            uploadComponent={
+              <ChatImageUpload onImageUpload={handleFileUpload} />
+            }
/>
diff --git a/ui/litellm-dashboard/src/components/playground/compareUI/components/MessageDisplay.test.tsx b/ui/litellm-dashboard/src/components/playground/compareUI/components/MessageDisplay.test.tsx
index 5e6fa5f739..c635e9555f 100644
--- a/ui/litellm-dashboard/src/components/playground/compareUI/components/MessageDisplay.test.tsx
+++ b/ui/litellm-dashboard/src/components/playground/compareUI/components/MessageDisplay.test.tsx
@@ -17,6 +17,15 @@ vi.mock("../../chat_ui/SearchResultsDisplay", () => ({
   SearchResultsDisplay: () => <div>SearchResultsDisplay</div>,
}));
+vi.mock("../../chat_ui/ChatImageRenderer", () => ({
+ default: ({ message }: { message: any }) =>
+ message.imagePreviewUrl ? (
+      <div data-testid="chat-image-renderer">
+        <img src={message.imagePreviewUrl} alt="attachment" />
+      </div>
+ ) : null,
+}));
+
describe("MessageDisplay", () => {
it("should render", () => {
const messages: MessageType[] = [
@@ -63,4 +72,24 @@ describe("MessageDisplay", () => {
expect(getByText("2+2 equals 4")).toBeInTheDocument();
expect(getByTestId("response-metrics")).toBeInTheDocument();
});
+
+ it("should display image attachment in user message", () => {
+ const messages: MessageType[] = [
+ {
+ role: "user",
+ content: "What is in this image? [Image attached]",
+ imagePreviewUrl: "blob:test-image-url",
+ },
+ {
+ role: "assistant",
+ content: "This is a test image",
+ model: "gpt-4",
+ },
+ ];
+    const { getByTestId, getByText } = render(<MessageDisplay messages={messages} isLoading={false} />);
+ expect(getByText("What is in this image? [Image attached]")).toBeInTheDocument();
+ expect(getByTestId("chat-image-renderer")).toBeInTheDocument();
+ const image = getByTestId("chat-image-renderer").querySelector("img");
+ expect(image).toHaveAttribute("src", "blob:test-image-url");
+ });
});
diff --git a/ui/litellm-dashboard/src/components/playground/compareUI/components/MessageDisplay.tsx b/ui/litellm-dashboard/src/components/playground/compareUI/components/MessageDisplay.tsx
index 734ddf26da..088e45b91b 100644
--- a/ui/litellm-dashboard/src/components/playground/compareUI/components/MessageDisplay.tsx
+++ b/ui/litellm-dashboard/src/components/playground/compareUI/components/MessageDisplay.tsx
@@ -1,8 +1,9 @@
+import { Bot, Loader2, UserRound } from "lucide-react";
import React from "react";
import ReactMarkdown from "react-markdown";
import { Prism as SyntaxHighlighter } from "react-syntax-highlighter";
import { coy } from "react-syntax-highlighter/dist/esm/styles/prism";
-import { Bot, Loader2, UserRound } from "lucide-react";
+import ChatImageRenderer from "../../chat_ui/ChatImageRenderer";
import ReasoningContent from "../../chat_ui/ReasoningContent";
import ResponseMetrics from "../../chat_ui/ResponseMetrics";
import { SearchResultsDisplay } from "../../chat_ui/SearchResultsDisplay";
@@ -56,6 +57,7 @@ export function MessageDisplay({ messages, isLoading }: MessageDisplayProps) {
hyphens: "auto",
}}
>
+              <ChatImageRenderer message={message} />
diff --git a/ui/litellm-dashboard/src/components/playground/compareUI/components/MessageInput.test.tsx b/ui/litellm-dashboard/src/components/playground/compareUI/components/MessageInput.test.tsx
--- a/ui/litellm-dashboard/src/components/playground/compareUI/components/MessageInput.test.tsx
+++ b/ui/litellm-dashboard/src/components/playground/compareUI/components/MessageInput.test.tsx
expect(button).toBeDisabled();
});
+
+ it("should enable send button when hasAttachment is true even with empty value", () => {
+ const onChange = vi.fn();
+ const onSend = vi.fn();
+    const uploadComponent = <div data-testid="upload-component">Upload</div>;
+ const { container, getByTestId } = render(
+      <MessageInput value="" onChange={onChange} onSend={onSend} hasAttachment uploadComponent={uploadComponent} />,
+ );
+ const button = container.querySelector("button") as HTMLButtonElement;
+ expect(getByTestId("upload-component")).toBeInTheDocument();
+ expect(button).not.toBeDisabled();
+ });
});
diff --git a/ui/litellm-dashboard/src/components/playground/compareUI/components/MessageInput.tsx b/ui/litellm-dashboard/src/components/playground/compareUI/components/MessageInput.tsx
index 23c8e89718..67036282b3 100644
--- a/ui/litellm-dashboard/src/components/playground/compareUI/components/MessageInput.tsx
+++ b/ui/litellm-dashboard/src/components/playground/compareUI/components/MessageInput.tsx
@@ -9,13 +9,17 @@ interface MessageInputProps {
onChange: (value: string) => void;
onSend: () => void;
disabled?: boolean;
+ hasAttachment?: boolean;
+ uploadComponent?: React.ReactNode;
}
-export function MessageInput({ value, onChange, onSend, disabled }: MessageInputProps) {
+export function MessageInput({ value, onChange, onSend, disabled, hasAttachment, uploadComponent }: MessageInputProps) {
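+  // Sending is allowed when the input is not disabled and there is either text or a pending attachment.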
+ const canSend = !disabled && (value.trim().length > 0 || Boolean(hasAttachment));
+
const handleKeyDown = (e: React.KeyboardEvent) => {
if (e.key === "Enter" && !e.shiftKey) {
e.preventDefault();
- if (!disabled && value.trim()) {
+ if (canSend) {
onSend();
}
}
@@ -24,6 +28,7 @@ export function MessageInput({ value, onChange, onSend, disabled }: MessageInput
return (
+      {uploadComponent && (
+        <div>{uploadComponent}</div>
+      )}
);