Looking for an example of Generative UI + OpenAI assistant API. #1382
Unanswered
gablabelle
asked this question in
Help
Replies: 2 comments
-
Bumping this, has anyone had luck finding an example?
Beta Was this translation helpful? Give feedback.
0 replies
-
Here is what I've done in order to use Vercel's Generative UI ( import {
createAI,
createStreamableUI,
createStreamableValue,
getMutableAIState,
} from "ai/rsc";
// ... other imports ...
export async function sendUserMessage(
openAiClient: OpenAI,
workspaceId: string,
assistantId: string,
openAiAssistantId: string,
chatId: string,
userId: string,
content: string,
tokenCallback: (tokens: number) => Promise<void>,
existingThreadId?: string
) {
"use server";
const aiState = getMutableAIState();
const textStream = createStreamableValue("");
const spinnerStream = createStreamableUI(
<BotMessage className="items-center">{spinner}</BotMessage>
);
const messageStream = createStreamableUI();
const uiStream = createStreamableUI();
(async () => {
try {
const chatFile = await getChatFiles(chatId);
// Create a thread if needed
const threadId =
existingThreadId ?? (await openAiClient.beta.threads.create({})).id;
// Add a message to the thread
const createMessage = await openAiClient.beta.threads.messages.create(
threadId,
{
role: "user",
content,
...(chatFile?.files
? {
attachments: chatFile.files.map((file) => {
return {
file_id: file.id,
tools: [{ type: "file_search" }],
};
}),
}
: {}),
}
);
// Extract the string from payload
const userContent = getContentFromMessage(createMessage);
const userMessage = {
id: createMessage.id,
role: createMessage.role,
content: userContent,
};
aiState.update({
...aiState.get(),
messages: [
...aiState.get().messages,
{
id: nanoid(),
role: "user",
content,
},
],
});
const functions: Array<AssistantTool> = [
{ type: "file_search" },
{
type: "function",
function: {
name: "generateFlashcards",
description: "Generate flashcards from the user's documents",
parameters: {
type: "object",
properties: {
flashcards: {
type: "array",
description: "The list of flashcards to generate",
items: {
type: "object",
properties: {
question: {
type: "string",
description: "The question to ask",
},
answer: {
type: "string",
description: "The answer to the question",
},
},
required: ["question", "answer"],
},
},
},
required: ["flashcards"],
},
},
},
];
let currentArguments = "";
let runId: string | undefined;
let isSpinnerStopped = false;
let isToolCallCreated = false;
openAiClient.beta.threads.runs
.stream(threadId, {
assistant_id: openAiAssistantId,
tools: [...functions],
additional_instructions: `
If and only if the user requests you to create or generate flashcards, call \`generateFlashcards\`. Generate at least 10 flashcards if possible. If you are asked to generate flashcards again, simply proceed, no questions asked.
Besides that, you can answer other questions normally.
`,
})
.on("runStepCreated", (runStep) => {
runId = runStep.run_id;
})
.on("toolCallCreated", (toolCallCreated) => {
console.log("toolCallCreated", toolCallCreated);
isToolCallCreated = true;
})
.on("toolCallDelta", (toolCallDelta) => {
if (
toolCallDelta.type === "function" &&
toolCallDelta.function?.arguments
) {
currentArguments += toolCallDelta.function.arguments;
}
})
.on("toolCallDone", async (toolCallDone) => {
if (!isSpinnerStopped) {
isSpinnerStopped = true;
spinnerStream.done(null);
}
const isFlashcards =
toolCallDone.type === "function" &&
toolCallDone.function.name === "generateFlashcards";
let assistantMessage: AssistantMessage | undefined;
if (isFlashcards) {
const { flashcards } = JSON.parse(currentArguments);
uiStream.update(
<BotMessage>
<Flashcards flashcards={flashcards} />
</BotMessage>
);
assistantMessage = {
id: nanoid(),
role: "assistant" as Message["role"],
content: "",
display: {
name: "generateFlashcards",
props: {
flashcards,
},
},
};
}
if (assistantMessage) {
await cacheAssistantsThread(
workspaceId,
assistantId,
chatId,
threadId,
userId,
userMessage,
assistantMessage
);
aiState.done({
...aiState.get(),
messages: [...aiState.get().messages, assistantMessage],
});
}
// Submit tool outputs to run
openAiClient.beta.threads.runs
.submitToolOutputsStream(threadId, runId!, {
tool_outputs: [
{
tool_call_id: toolCallDone.id,
output: "Success",
},
],
})
.on("runStepDone", async (runStep) => {
console.log("Submit tool runStepDone", runStep);
await tokenCallback(runStep.usage?.total_tokens || 0);
})
.on("end", () => {
console.log("Submit tool output end", runId);
uiStream.done();
textStream.done();
messageStream.done();
});
})
.on("messageCreated", (messageCreated) => {
if (!isSpinnerStopped) {
isSpinnerStopped = true;
spinnerStream.done(null);
}
messageStream.update(<BotMessageStream content={textStream.value} />);
})
.on("messageDelta", (messageDelta) => {
const content = getContentFromMessage(messageDelta);
textStream.update(content);
})
.on("messageDone", async (message) => {
const assistantContent = getContentFromMessage(message);
const assistantMessage = {
id: message.id,
role: message.role,
content: assistantContent,
};
await cacheAssistantsThread(
workspaceId,
assistantId,
chatId,
threadId,
userId,
userMessage,
assistantMessage
);
aiState.done({
...aiState.get(),
messages: [
...aiState.get().messages,
{
id: nanoid(),
role: "assistant",
content: assistantContent,
},
],
});
})
.on("runStepDone", async (runStep) => {
await tokenCallback(runStep.usage?.total_tokens || 0);
})
.on("end", () => {
console.log("Run end", runId);
if (!isToolCallCreated) {
uiStream.done();
textStream.done();
messageStream.done();
}
})
.on("error", (error) => {
console.error("Run error", runId, error);
});
} catch (error) {
console.error("Server action error:", error);
uiStream.error(error);
textStream.error(error);
messageStream.error(error);
aiState.done({
...aiState.get(),
});
}
})();
const responseId = Date.now();
return {
id: responseId,
attachments: uiStream.value,
spinner: spinnerStream.value,
display: messageStream.value,
};
} |
Beta Was this translation helpful? Give feedback.
0 replies
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
-
Hello,
I'm simply looking for an example of a Generative UI used in combination with the OpenAI assistant API.
Thanks
Beta Was this translation helpful? Give feedback.
All reactions