diff --git a/react/src/components/lablupTalkativotUI/LLMChatCard.tsx b/react/src/components/lablupTalkativotUI/LLMChatCard.tsx
index d7305f7fa..a8a208b91 100644
--- a/react/src/components/lablupTalkativotUI/LLMChatCard.tsx
+++ b/react/src/components/lablupTalkativotUI/LLMChatCard.tsx
@@ -14,6 +14,7 @@ import {
   DeleteOutlined,
   LinkOutlined,
   MoreOutlined,
+  PictureOutlined,
   RocketOutlined,
 } from '@ant-design/icons';
 import { Attachments, AttachmentsProps, Sender } from '@ant-design/x';
@@ -32,6 +33,7 @@ import {
   MenuProps,
   Tag,
   theme,
+  Tooltip,
   Typography,
 } from 'antd';
 import _ from 'lodash';
@@ -70,6 +72,7 @@ export interface LLMChatCardProps extends CardProps {
   onSubmitChange?: () => void;
   showCompareMenuItem?: boolean;
   modelToken?: string;
+  isImageGeneration?: boolean;
 }
 
 const LLMChatCard: React.FC<LLMChatCardProps> = ({
@@ -89,10 +92,12 @@ const LLMChatCard: React.FC<LLMChatCardProps> = ({
   onSubmitChange,
   showCompareMenuItem,
   modelToken,
+  isImageGeneration,
   ...cardProps
 }) => {
   const webuiNavigate = useWebUINavigate();
   const [isOpenAttachments, setIsOpenAttachments] = useState(false);
+  const [loadingImageGeneration, setLoadingImageGeneration] = useState(false);
 
   const [modelId, setModelId] = useControllableValue(cardProps, {
     valuePropName: 'modelId',
@@ -221,6 +226,33 @@ const LLMChatCard: React.FC<LLMChatCardProps> = ({
     },
   ]);
 
+  const generateImage = async (prompt: string, accessKey: string) => {
+    setLoadingImageGeneration(true);
+    try {
+      const response = await fetch(
+        'https://stable-diffusion-3m.asia03.app.backend.ai/generate-image',
+        {
+          method: 'POST',
+          headers: {
+            'Content-Type': 'application/json',
+          },
+          body: JSON.stringify({
+            prompt: prompt,
+            access_key: accessKey,
+          }),
+        },
+      );
+      if (response.ok) {
+        const responseData = await response.json();
+        return 'data:image/png;base64,' + responseData.image_base64;
+      } else {
+        throw new Error(`Error generating image (HTTP ${response.status})`);
+      }
+    } finally {
+      setLoadingImageGeneration(false);
+    }
+  };
+
   return ( = ({ /> }
+          styles={{
+            prefix: {
+              alignSelf: 'center',
+            },
+          }}
           prefix={ false}
@@ -359,11 +396,11 @@ const LLMChatCard: React.FC<LLMChatCardProps> = ({
               onInputChange(v);
             }
           }}
-          loading={isLoading}
+          loading={isLoading || loadingImageGeneration}
           onStop={() => {
             stop();
          }}
-          onSend={() => {
+          onSend={async () => {
             if (input || !_.isEmpty(files)) {
               const fileList = _.map(
                 files,
@@ -376,15 +413,42 @@ const LLMChatCard: React.FC<LLMChatCardProps> = ({
                 dataTransfer.items.add(file);
               });
 
-              append(
-                {
-                  role: 'user',
-                  content: input,
-                },
-                {
-                  experimental_attachments: dataTransfer.files,
-                },
-              );
+              if (isImageGeneration) {
+                try {
+                  const imageBase64 = await generateImage(input, modelToken ?? '');
+                  setMessages((prevMessages) => [
+                    ...prevMessages,
+                    {
+                      id: _.uniqueId(),
+                      role: 'user',
+                      content: input,
+                    },
+                    {
+                      id: _.uniqueId(),
+                      role: 'assistant',
+                      content: '',
+                      experimental_attachments: [
+                        {
+                          contentType: 'image/png',
+                          url: imageBase64,
+                        },
+                      ],
+                    },
+                  ]);
+                } catch (error) {
+                  console.error(error);
+                }
+              } else {
+                append(
+                  {
+                    role: 'user',
+                    content: input,
+                  },
+                  {
+                    experimental_attachments: dataTransfer.files,
+                  },
+                );
+              }
 
               setTimeout(() => {
                 setInput('');