// Allow streaming responses up to 30 seconds
export const maxDuration = 30;

export async function POST(req: Request) {
  try {
    const { messages, sessionId } = await req.json();
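
    // Example request body (illustrative values only):
    //   {
    //     "messages": [{ "role": "user", "content": [{ "type": "text", "text": "Hi" }] }],
    //     "sessionId": "session_abc123"  // optional; may also arrive as an X-Session-ID header
    //   }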

    // Transform assistant-ui message format to backend format
    const transformedMessages = messages.map((msg: { role: string; content: unknown }) => ({
      role: msg.role,
      content: Array.isArray(msg.content)
        ? msg.content.map((part: { text?: string; content?: string }) => part.text || part.content || '').join('')
        : msg.content,
    }));
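    // After flattening, each message looks like { role: 'user', content: 'Hi' } (illustrative).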

    // Get session ID from multiple sources (priority order: body, header, generate new)
    const headerSessionId = req.headers.get('X-Session-ID');
    const effectiveSessionId =
      sessionId ||
      headerSessionId ||
      `session_${Date.now()}_${Math.random().toString(36).substring(2)}`;

    console.log(`Using session ID: ${effectiveSessionId}`);

    // Forward request to our Python backend with enhanced configuration
    const apiUrl = process.env.NEXT_PUBLIC_API_URL || 'http://localhost:8000/api';
    const backendResponse = await fetch(`${apiUrl}/ai-sdk/chat`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'Accept': 'text/plain',
      },
      body: JSON.stringify({
        messages: transformedMessages,
        session_id: effectiveSessionId,
        // Add metadata for better assistant-ui integration
        metadata: {
          source: 'assistant-ui',
          version: '0.10.x',
          timestamp: new Date().toISOString(),
        },
      }),
    });

    if (!backendResponse.ok) {
      const errorText = await backendResponse.text();
      console.error(`Backend error (${backendResponse.status}):`, errorText);
      throw new Error(`Backend responded with status: ${backendResponse.status}`);
    }

    // Return the stream from our backend with proper Data Stream Protocol headers
    return new Response(backendResponse.body, {
      headers: {
        'Content-Type': 'text/plain; charset=utf-8',
        'Cache-Control': 'no-cache, no-store, must-revalidate',
        'Connection': 'keep-alive',
        'x-vercel-ai-data-stream': 'v1', // AI SDK compatibility
        'x-assistant-ui-stream': 'v1', // assistant-ui compatibility
        'Access-Control-Allow-Origin': '*',
        'Access-Control-Allow-Methods': 'POST, GET, OPTIONS',
        'Access-Control-Allow-Headers': 'Content-Type, Accept',
      },
    });
  } catch (error) {
    console.error('Chat API error:', error);

    // Return error in Data Stream Protocol format
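    // (Each line in the data stream protocol starts with a part-type code; the
    // `3:` prefix below marks an error part, so AI SDK clients surface this
    // message as a stream error rather than plain text.)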
    return new Response(
      `3:${JSON.stringify(error instanceof Error ? error.message : 'Unknown error')}\n`,
      {
        status: 500,
        headers: {
          'Content-Type': 'text/plain; charset=utf-8',
          'x-vercel-ai-data-stream': 'v1',
        },
      }
    );
  }
}

// Handle preflight requests
export async function OPTIONS() {
  return new Response(null, {
    status: 200,
    headers: {
      'Access-Control-Allow-Origin': '*',
      'Access-Control-Allow-Methods': 'POST, GET, OPTIONS',
      'Access-Control-Allow-Headers': 'Content-Type, Accept',
    },
  });
}