Next.js (LangChain)

If your AI chatbot runs on a Next.js backend with the LangChain.js SDK, use this guide to send user conversation data to Growl.

If you're using ChatOpenAI or a similar LangChain chat model to generate LLM responses, you can send conversation data to Growl by adding a callback to the model.

This approach works for both the stream and invoke functions: the example below streams, and a non-streaming invoke variant is sketched after it.

import { NextRequest } from "next/server";
import { Message as VercelChatMessage, StreamingTextResponse } from "ai";
import { ChatOpenAI } from "@langchain/openai";
import { PromptTemplate } from "@langchain/core/prompts";
import { HttpResponseOutputParser } from "langchain/output_parsers";
import recordGrowlEvent from "./growl";

// Flatten a chat message into a single "role: content" line for the prompt.
const formatMessage = (message: VercelChatMessage) => {
  return `${message.role}: ${message.content}`;
};

const TEMPLATE = `
Current conversation:
{chat_history}
User: {input}
AI:`;

export async function POST(req: NextRequest) {
  const body = await req.json();
  const messages = body.messages ?? [];

  // visitor_id is required - must be obtained from window.GrowlAds.getVisitorId()
  // on the client side and sent along in the request body.
  const visitor_id: string = body.visitor_id;

  const prompt = PromptTemplate.fromTemplate(TEMPLATE);

  // Forward the incoming request headers so Growl can attribute the conversation.
  const headersObject = Object.fromEntries(req.headers.entries());

  const model = new ChatOpenAI({
    model: "gpt-4o-mini",
    callbacks: [
      {
        // Fires once the LLM has finished generating - even when streaming -
        // so the complete AI response is available in `output`.
        handleLLMEnd: async (output) => {
          await recordGrowlEvent({
            publisher_id: "<publisher_id>",
            user_id: "<user-id>",
            user_email: "<user-email>",
            visitor_id: visitor_id,
            chat_id: "<chat-id>",
            headers: headersObject,
            user_message: { text: messages[messages.length - 1].content },
            ai_message: { text: output.generations[0][0].text },
          });
        },
      },
    ],
  });

  const outputParser = new HttpResponseOutputParser();
  const chain = prompt.pipe(model).pipe(outputParser);

  const stream = await chain.stream({
    chat_history: messages.map(formatMessage).join("\n"),
    input: messages[messages.length - 1].content,
  });

  return new StreamingTextResponse(stream);
}
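
The same callback fires when you call invoke instead of stream, so nothing about the Growl setup changes. Here is a minimal sketch of the non-streaming tail of the handler, assuming the same prompt, model, messages, and formatMessage from the example above:

// Non-streaming variant: build the chain without the HTTP output parser.
// invoke returns an AIMessage once generation is complete.
const chain = prompt.pipe(model);

const result = await chain.invoke({
  chat_history: messages.map(formatMessage).join("\n"),
  input: messages[messages.length - 1].content,
});

// handleLLMEnd has already sent the event to Growl by this point.
// NextResponse comes from "next/server".
return NextResponse.json({ output: result.content });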
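
The visitor_id the route reads from the request body has to come from the browser. Here is a hypothetical client-side sketch, assuming the Growl script is already loaded on the page, that window.GrowlAds.getVisitorId() returns the id synchronously (per the comment in the route above), and that the route is served at /api/chat - adjust the path to match your app:

// Client-side helper: read the visitor id from the Growl script and
// include it in the request body sent to the chat route.
declare global {
  interface Window {
    GrowlAds: { getVisitorId: () => string };
  }
}

export async function sendChatMessage(
  messages: { role: string; content: string }[]
) {
  // Taken from the comment in the route handler above.
  const visitor_id = window.GrowlAds.getVisitorId();

  const res = await fetch("/api/chat", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ messages, visitor_id }),
  });

  // The route streams text back; consume res.body as your UI requires.
  return res.body;
}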