from fallom import models

# Look up which model this session was assigned
model = models.get("summarizer-config", session_id)
# Returns: "gpt-4o" or "claude-3-5-sonnet" based on your config weights

agent = Agent(model=model)
agent.run(message)
Pin to a specific config version, or use latest (default):
Copy
# Use latest version (default)
model = models.get("my-config", session_id)

# Pin to specific version
model = models.get("my-config", session_id, version=2)
import { models } from "@fallom/trace";// Get assigned model for this sessionconst model = await models.get("summarizer-config", sessionId);// Returns: "gpt-4o" or "claude-3-5-sonnet" based on your config weightsconst response = await openai.chat.completions.create({ model, ... });
A/B test between standard models and custom-hosted models (Novita, Together, Fireworks, etc.):
Copy
import { models } from "@fallom/trace";import { createOpenAI } from "@ai-sdk/openai";// Get model from A/B test configconst modelId = await models.get("vision-model", sessionId, { fallback: "gpt-4o",});// Route to the correct providerconst model = createModelClient(modelId);
import { models } from "@fallom/trace";import { createOpenAI } from "@ai-sdk/openai";import { generateText } from "ai";// Initialize once at startupmodels.init({ apiKey: process.env.FALLOM_API_KEY });async function chat(sessionId: string, message: string) { // Get A/B tested model const modelId = await models.get("my-agent", sessionId, { fallback: "gpt-4o-mini", }); // Route to correct provider const model = createModelClient(modelId); // Use with Vercel AI SDK const result = await generateText({ model, prompt: message, }); return result.text;}
Copy
import os

from fallom import models

# Initialize once at startup
models.init(api_key=os.environ["FALLOM_API_KEY"])


def chat(session_id: str, message: str) -> str:
    """Run one chat turn using the A/B tested model assigned to this session.

    Args:
        session_id: Stable session identifier used for model assignment.
        message: The user's message for this turn.

    Returns:
        The assistant's reply text.
    """
    # Get A/B tested model; falls back to gpt-4o-mini if lookup fails
    model_id = models.get("my-agent", session_id, fallback="gpt-4o-mini")

    # Route to correct provider
    # NOTE(review): create_model_client is assumed to be defined elsewhere
    # in these docs — it returns an (client, model_name) pair.
    client, model_name = create_model_client(model_id)

    # Use with OpenAI SDK
    response = client.chat.completions.create(
        model=model_name,
        messages=[{"role": "user", "content": message}],
    )
    return response.choices[0].message.content