OpenRouter provides an OpenAI-compatible API for tool calling across multiple AI models and providers.

This allows you to use the same code to interact with different models that support function calling.

TypeScript

import { OpenAI } from 'openai';
import { AgentRPC } from 'agentrpc';

// Point the OpenAI SDK at OpenRouter's OpenAI-compatible endpoint.
const openai = new OpenAI({
  apiKey: process.env.OPENROUTER_API_KEY,
  baseURL: 'https://openrouter.ai/api/v1',
});
// AgentRPC client; exposes registered tools in OpenAI function-calling format.
const rpc = new AgentRPC({ apiSecret: process.env.AGENTRPC_API_SECRET });

/**
 * Fetches the AgentRPC tool definitions, sends a chat completion request
 * through OpenRouter, and executes any tool calls the model requests,
 * logging each tool name and result.
 */
const main = async () => {
  const tools = await rpc.OpenAI.getTools();
  const completion = await openai.chat.completions.create({
    model: 'google/gemini-2.0-flash-001',
    messages: [
      {
        role: 'user',
        content: 'What is the weather in Melbourne?',
      },
    ],
    tools,
  });

  const message = completion.choices[0]?.message;

  if (message?.tool_calls) {
    for (const toolCall of message.tool_calls) {
      console.log('Agent is calling Tool', toolCall.function.name);
      const result = await rpc.OpenAI.executeTool(toolCall);
      console.log(result);
    }
  }
};

// Don't leave the promise floating: report failures (network errors, bad
// credentials, …) and signal them through the process exit code instead of
// letting them surface as an unhandled rejection.
main().catch((err) => {
  console.error(err);
  process.exitCode = 1;
});

Python

import os

from agentrpc import AgentRPC
from openai import OpenAI


def main():
    """Fetch AgentRPC tools, ask the model a question via OpenRouter, and
    execute any tool calls it returns, printing each tool name and result."""
    agentrpc = AgentRPC(api_secret=os.environ.get("AGENTRPC_API_SECRET", ""))
    openai = OpenAI(
        base_url="https://openrouter.ai/api/v1",
        api_key=os.environ.get("OPENROUTER_API_KEY", ""),
    )

    # Tool definitions in OpenAI function-calling format.
    tools = agentrpc.openai.completions.get_tools()

    completion = openai.chat.completions.create(
        model="google/gemini-2.0-flash-001",
        messages=[{"role": "user", "content": "What is the weather in Melbourne?"}],
        tools=tools,
    )

    # Bind the first choice's message once instead of re-indexing.
    message = completion.choices[0].message
    tool_calls = message.tool_calls
    if tool_calls:
        for tool_call in tool_calls:
            print("Agent is calling Tool", tool_call.function.name)
            print(agentrpc.openai.completions.execute_tool(tool_call))


if __name__ == "__main__":
    main()