Skip to content

Commit 158e950

Browse files
authored
Merge pull request #4808 from udecode/registry
Update Registry
2 parents aa343ed + e7f6a91 commit 158e950

File tree

4 files changed

+4
-4
lines changed

4 files changed

+4
-4
lines changed

apps/www/public/r/ai-api.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@
1414
"files": [
1515
{
1616
"path": "src/registry/app/api/ai/command/route.ts",
17-
"content": "import type {\n ChatMessage,\n ToolName,\n} from '@/registry/components/editor/use-chat';\nimport type { NextRequest } from 'next/server';\n\nimport { createGateway } from '@ai-sdk/gateway';\nimport {\n type LanguageModel,\n type UIMessageStreamWriter,\n createUIMessageStream,\n createUIMessageStreamResponse,\n generateObject,\n streamObject,\n streamText,\n tool,\n} from 'ai';\nimport { NextResponse } from 'next/server';\nimport { type SlateEditor, createSlateEditor, nanoid } from 'platejs';\nimport { z } from 'zod';\n\nimport { BaseEditorKit } from '@/registry/components/editor/editor-base-kit';\nimport { markdownJoinerTransform } from '@/registry/lib/markdown-joiner-transform';\n\nimport {\n buildEditTableMultiCellPrompt,\n getChooseToolPrompt,\n getCommentPrompt,\n getEditPrompt,\n getGeneratePrompt,\n} from './prompt';\n\nexport async function POST(req: NextRequest) {\n const { apiKey: key, ctx, messages: messagesRaw, model } = await req.json();\n\n const { children, selection, toolName: toolNameParam } = ctx;\n\n const editor = createSlateEditor({\n plugins: BaseEditorKit,\n selection,\n value: children,\n });\n\n const apiKey = key || process.env.AI_GATEWAY_API_KEY;\n\n if (!apiKey) {\n return NextResponse.json(\n { error: 'Missing AI Gateway API key.' },\n { status: 401 }\n );\n }\n\n const isSelecting = editor.api.isExpanded();\n\n const gatewayProvider = createGateway({\n apiKey,\n });\n\n try {\n const stream = createUIMessageStream<ChatMessage>({\n execute: async ({ writer }) => {\n let toolName = toolNameParam;\n\n if (!toolName) {\n const prompt = getChooseToolPrompt({\n isSelecting,\n messages: messagesRaw,\n });\n\n const enumOptions = isSelecting\n ? 
['generate', 'edit', 'comment']\n : ['generate', 'comment'];\n const modelId = model || 'google/gemini-2.5-flash';\n\n const { object: AIToolName } = await generateObject({\n enum: enumOptions,\n model: gatewayProvider(modelId),\n output: 'enum',\n prompt,\n });\n\n writer.write({\n data: AIToolName as ToolName,\n type: 'data-toolName',\n });\n\n toolName = AIToolName;\n }\n\n const stream = streamText({\n experimental_transform: markdownJoinerTransform(),\n model: gatewayProvider(model || 'openai/gpt-4o-mini'),\n // Not used\n prompt: '',\n tools: {\n comment: getCommentTool(editor, {\n messagesRaw,\n model: gatewayProvider(model || 'google/gemini-2.5-flash'),\n writer,\n }),\n table: getTableTool(editor, {\n messagesRaw,\n model: gatewayProvider(model || 'google/gemini-2.5-flash'),\n writer,\n }),\n },\n prepareStep: async (step) => {\n if (toolName === 'comment') {\n return {\n ...step,\n toolChoice: { toolName: 'comment', type: 'tool' },\n };\n }\n\n if (toolName === 'edit') {\n const [editPrompt, editType] = getEditPrompt(editor, {\n isSelecting,\n messages: messagesRaw,\n });\n\n // Table editing uses the table tool\n if (editType === 'table') {\n return {\n ...step,\n toolChoice: { toolName: 'table', type: 'tool' },\n };\n }\n\n return {\n ...step,\n activeTools: [],\n model:\n editType === 'selection'\n ? 
//The selection task is more challenging, so we chose to use Gemini 2.5 Flash.\n gatewayProvider(model || 'google/gemini-2.5-flash')\n : gatewayProvider(model || 'openai/gpt-4o-mini'),\n messages: [\n {\n content: editPrompt,\n role: 'user',\n },\n ],\n };\n }\n\n if (toolName === 'generate') {\n const generatePrompt = getGeneratePrompt(editor, {\n isSelecting,\n messages: messagesRaw,\n });\n\n return {\n ...step,\n activeTools: [],\n messages: [\n {\n content: generatePrompt,\n role: 'user',\n },\n ],\n model: gatewayProvider(model || 'openai/gpt-4o-mini'),\n };\n }\n },\n });\n\n writer.merge(stream.toUIMessageStream({ sendFinish: false }));\n },\n });\n\n return createUIMessageStreamResponse({ stream });\n } catch {\n return NextResponse.json(\n { error: 'Failed to process AI request' },\n { status: 500 }\n );\n }\n}\n\nconst getCommentTool = (\n editor: SlateEditor,\n {\n messagesRaw,\n model,\n writer,\n }: {\n messagesRaw: ChatMessage[];\n model: LanguageModel;\n writer: UIMessageStreamWriter<ChatMessage>;\n }\n) =>\n tool({\n description: 'Comment on the content',\n inputSchema: z.object({}),\n execute: async () => {\n const { elementStream } = streamObject({\n model,\n output: 'array',\n prompt: getCommentPrompt(editor, {\n messages: messagesRaw,\n }),\n schema: z\n .object({\n blockId: z\n .string()\n .describe(\n 'The id of the starting block. If the comment spans multiple blocks, use the id of the first block.'\n ),\n comment: z\n .string()\n .describe('A brief comment or explanation for this fragment.'),\n content: z\n .string()\n .describe(\n String.raw`The original document fragment to be commented on.It can be the entire block, a small part within a block, or span multiple blocks. 
If spanning multiple blocks, separate them with two \\n\\n.`\n ),\n })\n .describe('A single comment'),\n });\n\n for await (const comment of elementStream) {\n const commentDataId = nanoid();\n\n writer.write({\n id: commentDataId,\n data: {\n comment,\n status: 'streaming',\n },\n type: 'data-comment',\n });\n }\n\n writer.write({\n id: nanoid(),\n data: {\n comment: null,\n status: 'finished',\n },\n type: 'data-comment',\n });\n },\n });\n\nconst getTableTool = (\n editor: SlateEditor,\n {\n messagesRaw,\n model,\n writer,\n }: {\n messagesRaw: ChatMessage[];\n model: LanguageModel;\n writer: UIMessageStreamWriter<ChatMessage>;\n }\n) =>\n tool({\n description: 'Edit table cells',\n inputSchema: z.object({}),\n execute: async () => {\n const { elementStream } = streamObject({\n model,\n output: 'array',\n prompt: buildEditTableMultiCellPrompt(editor, messagesRaw),\n schema: z\n .object({\n content: z\n .string()\n .describe(\n String.raw`The new content for the cell. Can contain multiple paragraphs separated by \\n\\n.`\n ),\n id: z.string().describe('The id of the table cell to update.'),\n })\n .describe('A table cell update'),\n });\n\n for await (const cellUpdate of elementStream) {\n writer.write({\n id: nanoid(),\n data: {\n cellUpdate,\n status: 'streaming',\n },\n type: 'data-table',\n });\n }\n\n writer.write({\n id: nanoid(),\n data: {\n cellUpdate: null,\n status: 'finished',\n },\n type: 'data-table',\n });\n },\n });\n",
17+
"content": "import type {\n ChatMessage,\n ToolName,\n} from '@/registry/components/editor/use-chat';\nimport type { NextRequest } from 'next/server';\n\nimport { createGateway } from '@ai-sdk/gateway';\nimport {\n type LanguageModel,\n type UIMessageStreamWriter,\n createUIMessageStream,\n createUIMessageStreamResponse,\n generateText,\n Output,\n streamText,\n tool,\n} from 'ai';\nimport { NextResponse } from 'next/server';\nimport { type SlateEditor, createSlateEditor, nanoid } from 'platejs';\nimport { z } from 'zod';\n\nimport { BaseEditorKit } from '@/registry/components/editor/editor-base-kit';\nimport { markdownJoinerTransform } from '@/registry/lib/markdown-joiner-transform';\n\nimport {\n buildEditTableMultiCellPrompt,\n getChooseToolPrompt,\n getCommentPrompt,\n getEditPrompt,\n getGeneratePrompt,\n} from './prompt';\n\nexport async function POST(req: NextRequest) {\n const { apiKey: key, ctx, messages: messagesRaw, model } = await req.json();\n\n const { children, selection, toolName: toolNameParam } = ctx;\n\n const editor = createSlateEditor({\n plugins: BaseEditorKit,\n selection,\n value: children,\n });\n\n const apiKey = key || process.env.AI_GATEWAY_API_KEY;\n\n if (!apiKey) {\n return NextResponse.json(\n { error: 'Missing AI Gateway API key.' },\n { status: 401 }\n );\n }\n\n const isSelecting = editor.api.isExpanded();\n\n const gatewayProvider = createGateway({\n apiKey,\n });\n\n try {\n const stream = createUIMessageStream<ChatMessage>({\n execute: async ({ writer }) => {\n let toolName = toolNameParam;\n\n if (!toolName) {\n const prompt = getChooseToolPrompt({\n isSelecting,\n messages: messagesRaw,\n });\n\n const enumOptions = isSelecting\n ? 
['generate', 'edit', 'comment']\n : ['generate', 'comment'];\n const modelId = model || 'google/gemini-2.5-flash';\n\n const { output: AIToolName } = await generateText({\n model: gatewayProvider(modelId),\n output: Output.choice({ options: enumOptions }),\n prompt,\n });\n\n writer.write({\n data: AIToolName as ToolName,\n type: 'data-toolName',\n });\n\n toolName = AIToolName;\n }\n\n const stream = streamText({\n experimental_transform: markdownJoinerTransform(),\n model: gatewayProvider(model || 'openai/gpt-4o-mini'),\n // Not used\n prompt: '',\n tools: {\n comment: getCommentTool(editor, {\n messagesRaw,\n model: gatewayProvider(model || 'google/gemini-2.5-flash'),\n writer,\n }),\n table: getTableTool(editor, {\n messagesRaw,\n model: gatewayProvider(model || 'google/gemini-2.5-flash'),\n writer,\n }),\n },\n prepareStep: async (step) => {\n if (toolName === 'comment') {\n return {\n ...step,\n toolChoice: { toolName: 'comment', type: 'tool' },\n };\n }\n\n if (toolName === 'edit') {\n const [editPrompt, editType] = getEditPrompt(editor, {\n isSelecting,\n messages: messagesRaw,\n });\n\n // Table editing uses the table tool\n if (editType === 'table') {\n return {\n ...step,\n toolChoice: { toolName: 'table', type: 'tool' },\n };\n }\n\n return {\n ...step,\n activeTools: [],\n model:\n editType === 'selection'\n ? 
//The selection task is more challenging, so we chose to use Gemini 2.5 Flash.\n gatewayProvider(model || 'google/gemini-2.5-flash')\n : gatewayProvider(model || 'openai/gpt-4o-mini'),\n messages: [\n {\n content: editPrompt,\n role: 'user',\n },\n ],\n };\n }\n\n if (toolName === 'generate') {\n const generatePrompt = getGeneratePrompt(editor, {\n isSelecting,\n messages: messagesRaw,\n });\n\n return {\n ...step,\n activeTools: [],\n messages: [\n {\n content: generatePrompt,\n role: 'user',\n },\n ],\n model: gatewayProvider(model || 'openai/gpt-4o-mini'),\n };\n }\n },\n });\n\n writer.merge(stream.toUIMessageStream({ sendFinish: false }));\n },\n });\n\n return createUIMessageStreamResponse({ stream });\n } catch {\n return NextResponse.json(\n { error: 'Failed to process AI request' },\n { status: 500 }\n );\n }\n}\n\nconst getCommentTool = (\n editor: SlateEditor,\n {\n messagesRaw,\n model,\n writer,\n }: {\n messagesRaw: ChatMessage[];\n model: LanguageModel;\n writer: UIMessageStreamWriter<ChatMessage>;\n }\n) =>\n tool({\n description: 'Comment on the content',\n inputSchema: z.object({}),\n strict: true,\n execute: async () => {\n const commentSchema = z.object({\n blockId: z\n .string()\n .describe(\n 'The id of the starting block. If the comment spans multiple blocks, use the id of the first block.'\n ),\n comment: z\n .string()\n .describe('A brief comment or explanation for this fragment.'),\n content: z\n .string()\n .describe(\n String.raw`The original document fragment to be commented on.It can be the entire block, a small part within a block, or span multiple blocks. 
If spanning multiple blocks, separate them with two \\n\\n.`\n ),\n });\n\n const { partialOutputStream } = streamText({\n model,\n output: Output.array({ element: commentSchema }),\n prompt: getCommentPrompt(editor, {\n messages: messagesRaw,\n }),\n });\n\n let lastLength = 0;\n\n for await (const partialArray of partialOutputStream) {\n for (let i = lastLength; i < partialArray.length; i++) {\n const comment = partialArray[i];\n const commentDataId = nanoid();\n\n writer.write({\n id: commentDataId,\n data: {\n comment,\n status: 'streaming',\n },\n type: 'data-comment',\n });\n }\n\n lastLength = partialArray.length;\n }\n\n writer.write({\n id: nanoid(),\n data: {\n comment: null,\n status: 'finished',\n },\n type: 'data-comment',\n });\n },\n });\n\nconst getTableTool = (\n editor: SlateEditor,\n {\n messagesRaw,\n model,\n writer,\n }: {\n messagesRaw: ChatMessage[];\n model: LanguageModel;\n writer: UIMessageStreamWriter<ChatMessage>;\n }\n) =>\n tool({\n description: 'Edit table cells',\n inputSchema: z.object({}),\n strict: true,\n execute: async () => {\n const cellUpdateSchema = z.object({\n content: z\n .string()\n .describe(\n String.raw`The new content for the cell. Can contain multiple paragraphs separated by \\n\\n.`\n ),\n id: z.string().describe('The id of the table cell to update.'),\n });\n\n const { partialOutputStream } = streamText({\n model,\n output: Output.array({ element: cellUpdateSchema }),\n prompt: buildEditTableMultiCellPrompt(editor, messagesRaw),\n });\n\n let lastLength = 0;\n\n for await (const partialArray of partialOutputStream) {\n for (let i = lastLength; i < partialArray.length; i++) {\n const cellUpdate = partialArray[i];\n\n writer.write({\n id: nanoid(),\n data: {\n cellUpdate,\n status: 'streaming',\n },\n type: 'data-table',\n });\n }\n\n lastLength = partialArray.length;\n }\n\n writer.write({\n id: nanoid(),\n data: {\n cellUpdate: null,\n status: 'finished',\n },\n type: 'data-table',\n });\n },\n });\n",
1818
"type": "registry:file",
1919
"target": "app/api/ai/command/route.ts"
2020
},

apps/www/public/r/components-changelog-docs.json

Lines changed: 1 addition & 1 deletion
Large diffs are not rendered by default.

apps/www/public/r/use-chat.json

Lines changed: 1 addition & 1 deletion
Large diffs are not rendered by default.

apps/www/public/tailwind.css

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

0 commit comments

Comments (0)