Function calling lets LLMs invoke your APIs: instead of answering in free text, the model emits structured JSON arguments for tools you define. Here’s the core production pattern: declare a tool schema, dispatch the model’s tool calls to real functions, then feed the results back so the model can compose a final answer.
import OpenAI from 'openai';

const openai = new OpenAI(); // reads OPENAI_API_KEY from the environment

// The schema the model sees: JSON Schema describing each tool's arguments
const tools = [{
  type: 'function',
  function: {
    name: 'get_weather',
    description: 'Get current weather for a location',
    parameters: {
      type: 'object',
      properties: {
        location: { type: 'string', description: 'City name' },
        unit: { type: 'string', enum: ['celsius', 'fahrenheit'] }
      },
      required: ['location']
    }
  }
}];
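
The model can still emit arguments that don’t match this schema, so it’s worth validating before you dispatch. Here’s a minimal sketch using the Ajv JSON Schema library (the checkArgs helper is illustrative, not part of any SDK):

import Ajv from 'ajv';

const ajv = new Ajv();

// Compile one validator per tool, keyed by function name
const validators = Object.fromEntries(
  tools.map(t => [t.function.name, ajv.compile(t.function.parameters)])
);

// Hypothetical helper: throw before calling a tool with bad arguments
function checkArgs(name, args) {
  const validate = validators[name];
  if (validate && !validate(args)) {
    throw new Error(`Bad arguments for ${name}: ${ajv.errorsText(validate.errors)}`);
  }
}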
// Map each tool name to a real implementation
const functions = {
  get_weather: async ({ location, unit = 'celsius' }) => {
    // Your API call here
    return { temp: 22, condition: 'sunny', location, unit };
  }
};
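
In production you also want to survive a hallucinated tool name or malformed JSON rather than crash mid-conversation. One way to harden things is a defensive wrapper along these lines (runTool is an illustrative name of mine, and it reuses the checkArgs sketch above); you could swap it in for the inline dispatch inside chat below:

// Hypothetical helper: never throws, so one bad tool call can't kill the loop
async function runTool(call) {
  const fn = functions[call.function.name];
  if (!fn) return { error: `Unknown tool: ${call.function.name}` };
  try {
    const args = JSON.parse(call.function.arguments);
    checkArgs(call.function.name, args);
    return await fn(args);
  } catch (err) {
    // Surfacing the error as data lets the model explain or retry
    return { error: String(err.message ?? err) };
  }
}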
async function chat(message) {
  const messages = [{ role: 'user', content: message }];
  const response = await openai.chat.completions.create({
    model: 'gpt-4',
    messages,
    tools
  });
  const reply = response.choices[0].message;
  const toolCalls = reply.tool_calls;
  if (!toolCalls) return reply.content; // model answered directly

  // Keep the assistant turn that requested the tools, then run each call
  // and record its result as a 'tool' message tied to the call id
  messages.push(reply);
  for (const call of toolCalls) {
    const fn = functions[call.function.name];
    const args = JSON.parse(call.function.arguments);
    const result = await fn(args);
    messages.push({ role: 'tool', tool_call_id: call.id, content: JSON.stringify(result) });
  }

  // Second round trip: the model turns raw tool output into a final answer
  const followUp = await openai.chat.completions.create({ model: 'gpt-4', messages, tools });
  return followUp.choices[0].message.content;
}
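
Usage is then a single await (top-level await works in an ES module; assumes OPENAI_API_KEY is set):

const answer = await chat("What's the weather in Oslo in celsius?");
console.log(answer); // e.g. "It's 22°C and sunny in Oslo." (from the stubbed tool above)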
