import { startAsCurrentSpan } from "@atla-ai/insights-sdk-js";
// Open a span for this generation; alias the handles to local names.
const { span: generationSpan, endSpan: closeSpan } = startAsCurrentSpan("my-llm-generation");
try {
  // Invoke the LLM through a framework the SDK has no auto-instrumentation for.
  const messages = [{ role: "user", content: "What is the capital of France?" }];
  const toolDefinitions = [
    {
      type: "function",
      function: {
        name: "get_capital",
        parameters: { type: "object", properties: { country: { type: "string" } } },
      },
    },
  ];
  const completion = await myClient.chat.completions.create({
    messages,
    tools: toolDefinitions,
  });
  // Attach the request/response payload to the span by hand, since no
  // instrumentation captured it automatically.
  generationSpan.recordGeneration({
    inputMessages: messages,
    outputMessages: completion.choices.map(({ message }) => message),
    tools: toolDefinitions,
  });
} finally {
  // Always close the span, even if the generation call throws.
  closeSpan();
}