Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@ coverage
out/
build
dist
apps/widget/public/embed.global.js


# Debug
Expand Down
61 changes: 58 additions & 3 deletions apps/web/app/api/chat/stream/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,42 @@ import { NextRequest, NextResponse } from "next/server";
import { streamText } from "ai";
import { ConvexHttpClient } from "convex/browser";
import { api } from "@workspace/backend/convex/_generated/api";
import { auth } from "@clerk/nextjs/server";

// ✅ CRITICAL: Set execution timeout for long-running streams
export const maxDuration = 60; // Seconds - adjust based on Vercel plan
export const runtime = "nodejs";

/** Per-bot escalation settings as stored on the bot config. */
type EscalationConfig = {
  enabled?: boolean;
  whatsapp?: string | null;
  email?: string | null;
};

/**
 * Builds the "Escalation Protocol" section appended to the system prompt.
 *
 * Requires escalation to be enabled AND both contact channels to be usable:
 * the WhatsApp number must contain at least one digit after stripping
 * non-digit characters, and the email must be non-empty after trimming.
 *
 * @param escalation - Optional escalation settings from the bot config.
 * @returns The prompt section as a single newline-joined string, or `null`
 *          when escalation is disabled or either contact channel is missing.
 */
function buildEscalationPrompt(escalation?: EscalationConfig) {
  // Disabled (or absent) escalation config produces no prompt section.
  if (!escalation?.enabled) return null;

  // Normalize: keep only digits for the wa.me deep link; trim the email.
  const digits = (escalation.whatsapp ?? "").replace(/\D/g, "");
  const address = (escalation.email ?? "").trim();

  // Both channels are required — a partial contact section is never emitted.
  if (digits === "" || address === "") {
    return null;
  }

  return `Escalation Protocol:
- If you cannot answer from the Knowledge Base or the user asks to contact a human, you MUST include the section below exactly.
- Do not add any other links anywhere in the response.

### Contact Support
[Chat WhatsApp](https://wa.me/${digits})
[Email Us](mailto:${address})`;
}

/**
* Streaming endpoint for bot responses
*
Expand All @@ -24,6 +55,11 @@ export const runtime = "nodejs";
*/
export async function POST(request: NextRequest) {
try {
const { userId, getToken } = await auth();
if (!userId) {
return NextResponse.json({ error: "Unauthorized" }, { status: 401 });
}

const { botId, conversationId, userMessage } = await request.json();

// Step 1: Validate inputs
Expand All @@ -43,8 +79,16 @@ export async function POST(request: NextRequest) {
);
}

// Step 2: Create Convex client (server-side only)
// Step 2: Create Convex client (server-side only) with auth
const convex = new ConvexHttpClient(process.env.NEXT_PUBLIC_CONVEX_URL!);
const convexToken = await getToken({ template: "convex" });
if (!convexToken) {
return NextResponse.json(
{ error: "Unauthorized: Missing Convex auth token" },
{ status: 401 },
);
}
convex.setAuth(convexToken);

// Step 3: Fetch context in parallel (before streaming starts)
const startTime = Date.now();
Expand All @@ -60,6 +104,7 @@ export async function POST(request: NextRequest) {

// Query 2: Conversation history
convex.query(api.ai.getConversationHistoryForStream, {
botId: botId as any,
conversationId: conversationId as any,
}),
]);
Expand Down Expand Up @@ -93,9 +138,12 @@ export async function POST(request: NextRequest) {
// Step 6: Build system prompt
const baseSystemPrompt =
botConfig.system_prompt || "You are a helpful assistant.";
const systemPrompt = baseSystemPrompt;
const escalationPrompt = buildEscalationPrompt(botConfig.escalation);
const systemPrompt = escalationPrompt
? `${baseSystemPrompt}\n\n${escalationPrompt}`
: baseSystemPrompt;

// Step 7: Initialize AI model (OpenAI, Groq, Google, etc.)
// Step 7: Initialize AI model (OpenAI, Groq, Google, Anthropic, etc.)
let model;
try {
switch (botConfig.model_provider) {
Expand All @@ -118,6 +166,13 @@ export async function POST(request: NextRequest) {
);
break;
}
case "Anthropic": {
const { createAnthropic } = await import("@ai-sdk/anthropic");
model = createAnthropic({ apiKey: botConfig.api_key })(
botConfig.model_id,
);
break;
}
default:
return NextResponse.json(
{
Expand Down
Loading