POST /api/chat
AI SDK-compatible chat endpoint with retrieval-augmented generation (RAG) and streaming responses.
interface ChatRequest {
messages: Array<{
role: 'user' | 'assistant' | 'system'
content: string
}>
}
curl -X POST "https://example.com/api/chat" \
-H "Content-Type: application/json" \
-d '{
"messages": [
{"role": "user", "content": "How do I build a Nuxt module?"}
]
}'
The response is streamed in the AI SDK UI message stream format.
Headers:
Content-Type: text/event-stream; charset=utf-8
Cache-Control: no-cache
Connection: keep-alive
Stream Format:
0:"Hello"
0:" there"
0:"!"
The response is compatible with AI SDK's useChat() composable.
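If you are not using useChat(), the text parts can be decoded by hand. A minimal sketch, assuming every line follows the TYPE:JSON format shown above (readTextParts is a hypothetical helper, not part of the module):
// Parse only the 0:"..." text parts from the stream
async function readTextParts(response: Response, onText: (text: string) => void) {
  const reader = response.body!.getReader()
  const decoder = new TextDecoder()
  let buffer = ''
  while (true) {
    const { done, value } = await reader.read()
    if (done) break
    buffer += decoder.decode(value, { stream: true })
    const lines = buffer.split('\n')
    buffer = lines.pop() ?? '' // keep any incomplete trailing line
    for (const line of lines) {
      if (line.startsWith('0:')) onText(JSON.parse(line.slice(2)))
    }
  }
}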
ai-search:chat:messages
Transform messages before processing.
export default defineNitroPlugin((nitro) => {
  nitro.hooks.hook('ai-search:chat:messages', async (ctx) => {
    // Filter or transform messages before they reach the LLM
    ctx.messages = ctx.messages.filter(m => m.role !== 'system')
  })
})
Context:
interface ChatMessagesContext {
messages: Message[]
event: H3Event
model: LanguageModel
config: RuntimeConfig
}
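The same hook can also bound prompt size; a minimal sketch that keeps only the most recent turns (the cutoff of 10 is an arbitrary example):
export default defineNitroPlugin((nitro) => {
  nitro.hooks.hook('ai-search:chat:messages', async (ctx) => {
    // Keep only the last 10 messages to limit prompt length
    ctx.messages = ctx.messages.slice(-10)
  })
})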
ai-search:chat:context
Modify search results used as RAG context.
export default defineNitroPlugin((nitro) => {
  nitro.hooks.hook('ai-search:chat:context', async (ctx) => {
    // Drop low-relevance results before they are used as context
    ctx.results = ctx.results.filter(r => r.score > 0.3)
  })
})
Context:
interface ChatContextContext {
query: string
results: SearchResult[]
history: Message[]
event: H3Event
config: RuntimeConfig
}
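For instance, the hook can cap how many retrieved documents reach the prompt; a minimal sketch (the limit of 5 is an arbitrary example):
export default defineNitroPlugin((nitro) => {
  nitro.hooks.hook('ai-search:chat:context', async (ctx) => {
    // Pass at most the top 5 results to the LLM
    ctx.results = ctx.results.slice(0, 5)
  })
})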
When the search returns relevant documents, the endpoint uses the filtered results (score > 0.2) as context:
Prompt: "Answer from context only, cite sources"
Context: URLs, scores, and markdown content
When no results pass the threshold, it falls back to the full /llms.txt content:
Prompt: "Provide overview of available topics"
Context: Complete llms.txt content
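Conceptually, the context assembly resembles the sketch below; buildSystemPrompt, the content field name, and the exact wording are illustrative, not the module's actual implementation:
// Illustrative only: folding search results into a system prompt
function buildSystemPrompt(results: SearchResult[]): string {
  const context = results
    .map(r => `Source: ${r.url} (score: ${r.score})\n${r.content}`)
    .join('\n---\n')
  return `Answer from context only, cite sources.\n\nContext:\n${context}`
}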
export default defineNuxtConfig({
aiSearch: {
chat: {
enabled: true,
route: '/api/chat'
},
llm: {
provider: 'openai',
model: 'gpt-4o-mini',
apiKey: process.env.OPENAI_API_KEY
}
}
})
curl -X POST "https://example.com/api/chat" \
-H "Content-Type: application/json" \
-d '{
"messages": [
{"role": "user", "content": "What is Nuxt?"}
]
}'
curl -X POST "https://example.com/api/chat" \
-H "Content-Type": "application/json" \
-d '{
"messages": [
{"role": "user", "content": "What is Nuxt?"},
{"role": "assistant", "content": "Nuxt is a Vue.js framework..."},
{"role": "user", "content": "How do I create a module?"}
]
}'
// Fetch API with streaming
const response = await fetch('/api/chat', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
messages: [
{ role: 'user', content: 'How do I build a Nuxt module?' }
]
})
})
const reader = response.body.getReader()
const decoder = new TextDecoder()
while (true) {
  const { done, value } = await reader.read()
  if (done) break
  // Raw stream protocol lines, e.g. 0:"Hello"
  console.log(decoder.decode(value, { stream: true }))
}
<script setup lang="ts">
import { useChat } from 'ai/vue'
const { messages, input, handleSubmit } = useChat({
api: '/api/chat'
})
</script>
<template>
<div>
<div v-for="m in messages" :key="m.id">
<strong>{{ m.role }}:</strong> {{ m.content }}
</div>
<form @submit="handleSubmit">
<input v-model="input" placeholder="Ask a question..." />
<button type="submit">Send</button>
</form>
</div>
</template>
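Note: in newer AI SDK releases the Vue bindings ship as the standalone @ai-sdk/vue package; adjust the import to match your installed version.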
Invalid Messages:
{
"statusCode": 400,
"message": "Messages array is required"
}
No User Messages:
{
"statusCode": 400,
"message": "At least one user message is required"
}
LLM Not Configured:
{
"statusCode": 500,
"message": "LLM not configured for chat endpoint"
}
Index Not Found:
{
"statusCode": 500,
"message": "Vector database not found"
}
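Client-side, these errors surface as non-2xx responses carrying the JSON bodies above; a minimal handling sketch:
const response = await fetch('/api/chat', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ messages: [{ role: 'user', content: 'What is Nuxt?' }] })
})
if (!response.ok) {
  // 400: invalid input, 500: server-side misconfiguration
  const error = await response.json()
  console.error(`Chat failed (${error.statusCode}): ${error.message}`)
}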
To allow cross-origin requests:
export default defineNuxtConfig({
routeRules: {
'/api/chat': {
cors: true,
headers: {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Methods': 'POST, OPTIONS',
'Access-Control-Allow-Headers': 'Content-Type'
}
}
}
})
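You can verify the configuration with a preflight request; -i prints the response headers so the Access-Control-Allow-* values are visible:
curl -i -X OPTIONS "https://example.com/api/chat" \
  -H "Origin: https://other-site.example" \
  -H "Access-Control-Request-Method: POST"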
import type { Message } from 'ai'
interface ChatRequest {
messages: Message[]
}
const request: ChatRequest = {
messages: [
{ role: 'user', content: 'How do I build a Nuxt module?' }
]
}
const response = await $fetch('/api/chat', {
method: 'POST',
body: request
})
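Note that $fetch resolves once the full response body has arrived; for token-by-token rendering, use the streaming fetch example or the useChat() composable shown above.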