Skip to content

Commit

Permalink
OpenAI support and tool bug fixes
Browse files Browse the repository at this point in the history
  • Loading branch information
RomneyDa committed Jan 11, 2025
1 parent 361b857 commit d51d46b
Show file tree
Hide file tree
Showing 6 changed files with 129 additions and 91 deletions.
96 changes: 69 additions & 27 deletions core/llm/openaiTypeConverters.ts
Original file line number Diff line number Diff line change
Expand Up @@ -19,38 +19,80 @@ export function toChatMessage(
tool_call_id: message.toolCallId,
};
}

if (typeof message.content === "string") {
return {
role: message.role,
content: message.content === "" ? " " : message.content, // LM Studio API doesn't accept empty strings
};
} else if (!message.content.some((item) => item.type !== "text")) {
// If no multi-media is in the message, just send as text
// for compatibility with OpenAI-"compatible" servers
// that don't support multi-media format
if (message.role === "system") {
return {
...message,
content: message.content.map((item) => item.text).join(""),
role: "system",
content: message.content,
};
}

const parts = message.content.map((part) => {
const msg: any = {
type: part.type,
text: part.text,
};
if (part.type === "imageUrl") {
msg.image_url = { ...part.imageUrl, detail: "auto" };
msg.type = "image_url";
}
return msg;
});

return {
...message,
content: parts,
let msg: ChatCompletionMessageParam = {
role: message.role,
content:
typeof message.content === "string"
? message.content === ""
? " "
: message.content
: !message.content.some((item) => item.type !== "text")
? message.content.map((item) => item.text).join("")
: message.content.map((part) => {
const msg: any = {
type: part.type,
text: part.text,
};
if (part.type === "imageUrl") {
msg.image_url = { ...part.imageUrl, detail: "auto" };
msg.type = "image_url";
}
return msg;
}),
};
if (
message.role === "assistant" &&
message.toolCalls &&
msg.role === "assistant"
) {
msg.tool_calls = message.toolCalls.map((toolCall) => ({
id: toolCall.id!,
type: toolCall.type!,
function: {
name: toolCall.function?.name!,
arguments: toolCall.function?.arguments!,
},
}));
}
return msg;
// if (typeof message.content === "string") {
// return {
// role: message.role,
// content: message.content === "" ? " " : message.content, // LM Studio API doesn't accept empty strings
// };
// } else if (!message.content.some((item) => item.type !== "text")) {
// // If no multi-media is in the message, just send as text
// // for compatibility with OpenAI-"compatible" servers
// // that don't support multi-media format
// return {
// ...message,
// content: message.content.map((item) => item.text).join(""),
// };
// }

// const parts = message.content.map((part) => {
// const msg: any = {
// type: part.type,
// text: part.text,
// };
// if (part.type === "imageUrl") {
// msg.image_url = { ...part.imageUrl, detail: "auto" };
// msg.type = "image_url";
// }
// return msg;
// });

// return {
// ...message,
// content: parts,
// };
}

export function toChatBody(
Expand Down
18 changes: 9 additions & 9 deletions core/llm/toolSupport.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,15 +11,15 @@ export const PROVIDER_TOOL_SUPPORT: Record<
return true;
}
},
// openai: (model) => {
// if (
// ["gpt-4", "o1", "chatgpt-4o-latest"].some((part) =>
// model.toLowerCase().startsWith(part),
// )
// ) {
// return true;
// }
// },
openai: (model) => {
if (
["gpt-4", "o1", "chatgpt-4o-latest"].some((part) =>
model.toLowerCase().startsWith(part),
)
) {
return true;
}
},
// https://ollama.com/search?c=tools
ollama: (model) => {
if (
Expand Down
54 changes: 24 additions & 30 deletions gui/src/components/StepContainer/ResponseActions.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@ export interface ResponseActionsProps {
index: number;
onDelete: () => void;
item: ChatHistoryItem;
shouldHideActions: boolean;
}

export default function ResponseActions({
Expand All @@ -23,7 +22,6 @@ export default function ResponseActions({
item,
isTruncated,
onDelete,
shouldHideActions,
}: ResponseActionsProps) {
const isInEditMode = useAppSelector(selectIsInEditMode);

Expand All @@ -33,37 +31,33 @@ export default function ResponseActions({

return (
<div className="mx-2 flex cursor-default items-center justify-end space-x-1 bg-transparent pb-0 text-xs text-gray-400">
{shouldHideActions || (
<>
{isTruncated && (
<HeaderButtonWithToolTip
tabIndex={-1}
text="Continue generation"
onClick={onContinueGeneration}
>
<BarsArrowDownIcon className="h-3.5 w-3.5 text-gray-500" />
</HeaderButtonWithToolTip>
)}
{isTruncated && (
<HeaderButtonWithToolTip
tabIndex={-1}
text="Continue generation"
onClick={onContinueGeneration}
>
<BarsArrowDownIcon className="h-3.5 w-3.5 text-gray-500" />
</HeaderButtonWithToolTip>
)}

<HeaderButtonWithToolTip
testId={`delete-button-${index}`}
text="Delete"
tabIndex={-1}
onClick={onDelete}
>
<TrashIcon className="h-3.5 w-3.5 text-gray-500" />
</HeaderButtonWithToolTip>
<HeaderButtonWithToolTip
testId={`delete-button-${index}`}
text="Delete"
tabIndex={-1}
onClick={onDelete}
>
<TrashIcon className="h-3.5 w-3.5 text-gray-500" />
</HeaderButtonWithToolTip>

<CopyIconButton
tabIndex={-1}
text={renderChatMessage(item.message)}
clipboardIconClassName="h-3.5 w-3.5 text-gray-500"
checkIconClassName="h-3.5 w-3.5 text-green-400"
/>
<CopyIconButton
tabIndex={-1}
text={renderChatMessage(item.message)}
clipboardIconClassName="h-3.5 w-3.5 text-gray-500"
checkIconClassName="h-3.5 w-3.5 text-green-400"
/>

<FeedbackButtons item={item} />
</>
)}
<FeedbackButtons item={item} />
</div>
);
}
30 changes: 15 additions & 15 deletions gui/src/components/StepContainer/StepContainer.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -35,9 +35,8 @@ export default function StepContainer(props: StepContainerProps) {
);
const uiConfig = useAppSelector(selectUIConfig);

const shouldHideActions =
(isStreaming && props.isLast) ||
historyItemAfterThis?.message.role === "assistant";
const hideActionSpace = historyItemAfterThis?.message.role === "assistant";
const hideActions = hideActionSpace || (isStreaming && props.isLast);

// const isStepAheadOfCurCheckpoint =
// isInEditMode && Math.floor(props.index / 2) > curCheckpointIndex;
Expand Down Expand Up @@ -100,18 +99,19 @@ export default function StepContainer(props: StepContainerProps) {
{props.isLast && <ThinkingIndicator historyItem={props.item} />}
</ContentDiv>
{/* We want to occupy space in the DOM regardless of whether the actions are visible to avoid jank on stream complete */}
<div className={`mt-2 h-7 transition-opacity duration-300 ease-in-out`}>
{!shouldHideActions && (
<ResponseActions
isTruncated={isTruncated}
onDelete={onDelete}
onContinueGeneration={onContinueGeneration}
index={props.index}
item={props.item}
shouldHideActions={shouldHideActions}
/>
)}
</div>
{!hideActionSpace && (
<div className={`mt-2 h-7 transition-opacity duration-300 ease-in-out`}>
{!hideActions && (
<ResponseActions
isTruncated={isTruncated}
onDelete={onDelete}
onContinueGeneration={onContinueGeneration}
index={props.index}
item={props.item}
/>
)}
</div>
)}
</div>
);
}
21 changes: 11 additions & 10 deletions gui/src/redux/slices/sessionSlice.ts
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,6 @@ import {
PromptLog,
Session,
SessionMetadata,
ToolCall,
ToolCallDelta,
ToolCallState,
} from "core";
Expand Down Expand Up @@ -284,9 +283,6 @@ export const sessionSlice = createSlice({
},
streamUpdate: (state, action: PayloadAction<ChatMessage[]>) => {
if (state.history.length) {
const lastItem = state.history[state.history.length - 1];
const lastMessage = lastItem.message;

function toolCallDeltaToState(
toolCallDelta: ToolCallDelta,
): ToolCallState {
Expand All @@ -309,13 +305,18 @@ export const sessionSlice = createSlice({
}

for (const message of action.payload) {
const lastItem = state.history[state.history.length - 1];
const lastMessage = lastItem.message;
if (
message.role &&
(lastMessage.role !== message.role ||
// This is when a tool call comes after assistant text
(lastMessage.content !== "" &&
message.role === "assistant" &&
message.toolCalls?.length))
lastMessage.role !== message.role ||
// This is for when a tool call comes immediately before/after tool call
(lastMessage.role === "assistant" &&
message.role === "assistant" &&
// Last message isn't completely new
!(!lastMessage.toolCalls?.length && !lastMessage.content) &&
// And there's a difference in tool call presence
(lastMessage.toolCalls?.length ?? 0) !==
(message.toolCalls?.length ?? 0))
) {
// Create a new message
const historyItem: ChatHistoryItemWithMessageId = {
Expand Down
1 change: 1 addition & 0 deletions gui/src/redux/thunks/streamThunkWrapper.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ export const streamThunkWrapper = createAsyncThunk<
dispatch(setDialogMessage(<StreamErrorDialog error={e} />));
dispatch(setShowDialog(true));
} finally {
console.log(getState().session.history);
dispatch(setInactive());
const state = getState();
if (state.session.mode === "chat") {
Expand Down

0 comments on commit d51d46b

Please sign in to comment.