commit db0e5965ec
Date: 2025-09-26 17:15:54 +08:00
211 changed files with 40437 additions and 0 deletions


@@ -0,0 +1,89 @@
// Allow streaming responses up to 30 seconds
export const maxDuration = 30;

export async function POST(req: Request) {
  try {
    const { messages, sessionId } = await req.json();

    // Transform the assistant-ui message format into the backend format:
    // multi-part content arrays are flattened into a single string.
    const transformedMessages = messages.map((msg: { role: string; content: unknown }) => ({
      role: msg.role,
      content: Array.isArray(msg.content)
        ? msg.content.map((part: { text?: string; content?: string }) => part.text || part.content || '').join('')
        : msg.content
    }));

    // Resolve the session ID (priority order: request body, header, newly generated)
    const headerSessionId = req.headers.get('X-Session-ID');
    const effectiveSessionId = sessionId || headerSessionId || `session_${Date.now()}_${Math.random().toString(36).substring(2)}`;
    console.log(`Using session ID: ${effectiveSessionId}`);

    // Forward the request to the Python backend
    const apiUrl = process.env["NEXT_PUBLIC_API_URL"] || "http://localhost:8000/api";
    const backendResponse = await fetch(`${apiUrl}/ai-sdk/chat`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'Accept': 'text/plain',
      },
      body: JSON.stringify({
        messages: transformedMessages,
        session_id: effectiveSessionId,
        // Metadata for better assistant-ui integration
        metadata: {
          source: 'assistant-ui',
          version: '0.10.x',
          timestamp: new Date().toISOString(),
        },
      }),
    });

    if (!backendResponse.ok) {
      const errorText = await backendResponse.text();
      console.error(`Backend error (${backendResponse.status}):`, errorText);
      throw new Error(`Backend responded with status: ${backendResponse.status}`);
    }

    // Return the backend stream with the proper Data Stream Protocol headers
    return new Response(backendResponse.body, {
      headers: {
        'Content-Type': 'text/plain; charset=utf-8',
        'Cache-Control': 'no-cache, no-store, must-revalidate',
        'Connection': 'keep-alive',
        'x-vercel-ai-data-stream': 'v1', // AI SDK compatibility
        'x-assistant-ui-stream': 'v1', // assistant-ui compatibility
        'Access-Control-Allow-Origin': '*',
        'Access-Control-Allow-Methods': 'POST, GET, OPTIONS',
        'Access-Control-Allow-Headers': 'Content-Type, Accept',
      },
    });
  } catch (error) {
    console.error('Chat API error:', error);

    // Return the error as a Data Stream Protocol error part ("3:" prefix)
    return new Response(
      `3:${JSON.stringify(error instanceof Error ? error.message : 'Unknown error')}\n`,
      {
        status: 500,
        headers: {
          'Content-Type': 'text/plain; charset=utf-8',
          'x-vercel-ai-data-stream': 'v1',
        },
      }
    );
  }
}

// Handle CORS preflight requests
export async function OPTIONS() {
  return new Response(null, {
    status: 200,
    headers: {
      'Access-Control-Allow-Origin': '*',
      'Access-Control-Allow-Methods': 'POST, GET, OPTIONS',
      'Access-Control-Allow-Headers': 'Content-Type, Accept',
    },
  });
}
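For reference, a minimal browser-side sketch of exercising this route and reading the streamed response. The message shape mirrors the transformation above; the session ID, message text, and function name are illustrative, not values from this commit.

// Illustrative client-side call; names and values here are examples only.
async function streamChat(): Promise<void> {
  const res = await fetch('/api/chat', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json', 'X-Session-ID': 'session_demo' },
    body: JSON.stringify({
      messages: [{ role: 'user', content: [{ text: 'Hello' }] }],
    }),
  });
  if (!res.ok || !res.body) throw new Error(`Chat request failed: ${res.status}`);

  // The body is a Data Stream Protocol stream; read and decode it incrementally.
  const reader = res.body.getReader();
  const decoder = new TextDecoder();
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    console.log(decoder.decode(value, { stream: true }));
  }
}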


@@ -0,0 +1,8 @@
import { NextResponse } from 'next/server';

// Expose the public runtime configuration needed by the browser client.
export async function GET() {
  return NextResponse.json({
    apiUrlPrefix: process.env["NEXT_PUBLIC_API_URL_PREFIX"] || "",
    apiUrl: process.env["NEXT_PUBLIC_API_URL"] || "http://localhost:8000",
  });
}
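A small sketch of how a client could consume this endpoint; the AppConfig type and loadConfig helper are made-up names for illustration.

// Hypothetical client-side helper for fetching the runtime config.
interface AppConfig {
  apiUrlPrefix: string;
  apiUrl: string;
}

async function loadConfig(): Promise<AppConfig> {
  const res = await fetch('/api/config');
  if (!res.ok) throw new Error(`Config request failed: ${res.status}`);
  return res.json() as Promise<AppConfig>;
}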


@@ -0,0 +1,9 @@
import { NextResponse } from 'next/server';

// Simple health-check endpoint reporting service status and current time.
export async function GET() {
  return NextResponse.json({
    status: 'ok',
    timestamp: new Date().toISOString(),
    service: 'agentic-rag-web'
  });
}


@@ -0,0 +1,72 @@
import { NextRequest, NextResponse } from "next/server";

export const runtime = "edge";

function getCorsHeaders() {
  return {
    "Access-Control-Allow-Origin": "*",
    "Access-Control-Allow-Methods": "GET, POST, PUT, PATCH, DELETE, OPTIONS",
    "Access-Control-Allow-Headers": "*",
  };
}

async function handleRequest(req: NextRequest, method: string) {
  try {
    // Strip the "/api/langgraph/" prefix so only the backend path remains.
    const path = req.nextUrl.pathname.replace(/^\/?api\/langgraph\//, "");

    // Drop the internal catch-all parameters Next.js adds to the query string.
    const url = new URL(req.url);
    const searchParams = new URLSearchParams(url.search);
    searchParams.delete("_path");
    searchParams.delete("nxtP_path");
    const queryString = searchParams.toString()
      ? `?${searchParams.toString()}`
      : "";

    const options: RequestInit = {
      method,
      headers: {
        "Content-Type": "application/json",
        // Add auth header if needed for production
        // "x-api-key": process.env["LANGCHAIN_API_KEY"] || "",
      },
    };

    if (["POST", "PUT", "PATCH"].includes(method)) {
      options.body = await req.text();
    }

    // Forward to our FastAPI backend
    const backendUrl = process.env["LANGGRAPH_API_URL"] || "http://localhost:8000";
    const res = await fetch(
      `${backendUrl}/api/${path}${queryString}`,
      options,
    );

    return new NextResponse(res.body, {
      status: res.status,
      statusText: res.statusText,
      headers: {
        // Headers is not a plain object; convert it before merging in the CORS headers.
        ...Object.fromEntries(res.headers.entries()),
        ...getCorsHeaders(),
      },
    });
  } catch (e: unknown) {
    const error = e as { message?: string; status?: number };
    return NextResponse.json(
      { error: error.message || 'Internal Server Error' },
      { status: error.status ?? 500 },
    );
  }
}

export const GET = (req: NextRequest) => handleRequest(req, "GET");
export const POST = (req: NextRequest) => handleRequest(req, "POST");
export const PUT = (req: NextRequest) => handleRequest(req, "PUT");
export const PATCH = (req: NextRequest) => handleRequest(req, "PATCH");
export const DELETE = (req: NextRequest) => handleRequest(req, "DELETE");

// Respond to CORS preflight requests
export const OPTIONS = () => {
  return new NextResponse(null, {
    status: 204,
    headers: {
      ...getCorsHeaders(),
    },
  });
};
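For orientation, a minimal sketch of calling a backend endpoint through this proxy from the browser; the "threads" path and createThread name are assumed examples, not endpoints defined in this commit.

// Illustrative only: "threads" is an assumed backend path, not defined in this commit.
async function createThread(): Promise<unknown> {
  const res = await fetch("/api/langgraph/threads", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({}),
  });
  if (!res.ok) throw new Error(`Proxy request failed: ${res.status}`);
  return res.json();
}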