SUTRA with Agno

SUTRA by TWO Platforms
SUTRA is a family of large multilingual language models (LMLMs) pioneered by TWO Platforms. SUTRA's dual-transformer approach extends the power of both MoE and dense AI language model architectures, delivering cost-efficient multilingual capabilities for over 50 languages. It powers scalable AI applications for conversation, search, and advanced reasoning, ensuring high performance across diverse languages, domains, and applications.
Agno
Agno is a developer-friendly framework for building intelligent agents with memory, tools, and reasoning. It simplifies creating multimodal, agentic workflows, with built-in FastAPI routes, a chat UI, and performance monitoring.
Get Your API Keys
Before you begin, make sure you have:
- A SUTRA API key (Get yours at TWO AI's SUTRA API page)
- Basic familiarity with Python and Jupyter notebooks
This notebook is designed to run in Google Colab, so no local Python installation is required.
Using SUTRA with Agno
Install Requirements
# SUTRA models are OpenAI API compatible
!pip install -qU openai agno tavily-python duckduckgo-search
Set Up API Keys
import os
from google.colab import userdata
# Set the API keys from Colab secrets
os.environ["SUTRA_API_KEY"] = userdata.get("SUTRA_API_KEY")
os.environ["TAVILY_API_KEY"] = userdata.get("TAVILY_API_KEY")
os.environ["OPENAI_API_KEY"] = userdata.get("OPENAI_API_KEY")
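Optional: Verify the SUTRA Key
Because SUTRA models are OpenAI API compatible, you can optionally verify your key before wiring up Agno by calling the endpoint with the plain openai client. The snippet below is a minimal sanity check under that assumption; the prompt text is arbitrary.
from openai import OpenAI
# Optional sanity check: call SUTRA's OpenAI-compatible endpoint directly
client = OpenAI(
    api_key=os.environ["SUTRA_API_KEY"],
    base_url="https://api.two.ai/v2",
)
check = client.chat.completions.create(
    model="sutra-v2",
    messages=[{"role": "user", "content": "Say hello in one word."}],
)
print(check.choices[0].message.content)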
Initialize Agent with Sutra Model:
import os
from agno.agent import Agent
from agno.models.openai.like import OpenAILike
# Initialize the Agent with Sutra model via OpenAILike wrapper
agent = Agent(
    model=OpenAILike(
        id="sutra-v2",
        api_key=os.getenv("SUTRA_API_KEY"),
        base_url="https://api.two.ai/v2"
    ),
    markdown=True
)
Send Message to Agent and Get Response:
# Send the message to the agent and get the response
response = agent.run("Who are you?")
# Print the response
print(response.content)
Define Multilingual Prompts:
import os
from agno.agent import Agent
from agno.models.openai.like import OpenAILike
agent = Agent(
    model=OpenAILike(
        id="sutra-v2",
        api_key=os.getenv("SUTRA_API_KEY"),
        base_url="https://api.two.ai/v2"
    ),
    markdown=True
)
prompts = [
    "తెలుగులో ఒక కథ చెప్పు?",                        # Telugu
    "Une histoire en français, s'il vous plaît.",    # French
    "Por favor, cuéntame una historia en español.",  # Spanish
    "कृपया हिंदी में एक कहानी सुनाइए।",                      # Hindi
    "Bitte erzähle mir eine Geschichte auf Deutsch." # German
]
Process Prompts and Print Responses:
for prompt in prompts:
    response = agent.run(prompt)
    print(f"\n📝 Prompt: {prompt}\n📘 Response: {response.content}\n")
Initialize Sutra Model and Start Chatbot Interaction
import os
from agno.agent import Agent
from agno.models.openai.like import OpenAILike
agent = Agent(
    model=OpenAILike(
        id="sutra-v2",
        api_key=os.getenv("SUTRA_API_KEY"),
        base_url="https://api.two.ai/v2"
    ),
    markdown=True
)
print("💬 Chatbot ready (type 'exit' to quit)\n")
while True:
    user_input = input("You: ")
    if user_input.lower() == "exit":
        print("Bot: Goodbye! 👋")
        break
    response = agent.run(user_input)
    print("Bot:", response.content)
Initialize Agent with Sutra Model and Tavily Tool
import os
from agno.agent import Agent
from agno.models.openai.like import OpenAILike
from agno.tools.tavily import TavilyTools
# Initialize the agent with Sutra model and Tavily tool
agent = Agent(
    model=OpenAILike(
        id="sutra-v2",
        api_key=os.getenv("SUTRA_API_KEY"),
        base_url="https://api.two.ai/v2"
    ),
    tools=[TavilyTools()],
    description="You are a search agent capable of answering queries using Tavily search.",
    instructions=[
        "Always use Tavily search to fetch relevant information for the user's query.",
        "Provide concise, informative, and relevant answers based on the search results."
    ],
    show_tool_calls=True,
    markdown=True
)
# Run a sample query using Tavily
response = agent.run("About SUTRA by TWO AI")
print(response.content)
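Since SUTRA is multilingual, the same search agent also accepts non-English queries; the follow-up below is a small sketch that asks about SUTRA in Hindi, reusing the agent defined above.
# Follow-up query in Hindi ("Tell me about SUTRA by TWO AI")
response = agent.run("TWO AI के SUTRA के बारे में बताइए।")
print(response.content)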
Agentic RAG with LanceDB using Sutra LLM in Agno Framework
import os
from agno.agent import Agent
from agno.models.openai.like import OpenAILike
from agno.embedder.openai import OpenAIEmbedder
from agno.knowledge.pdf_url import PDFUrlKnowledgeBase
from agno.vectordb.lancedb import LanceDb, SearchType
# Step 1: Define the Knowledge Base from PDF URL
knowledge_base = PDFUrlKnowledgeBase(
    urls=["https://agno-public.s3.amazonaws.com/recipes/ThaiRecipes.pdf"],
    vector_db=LanceDb(
        table_name="recipes",
        uri="tmp/lancedb",
        search_type=SearchType.vector,
        embedder=OpenAIEmbedder(id="text-embedding-3-small"),
    ),
)
# Step 2: Load the knowledge base (run only once; comment out on later runs)
knowledge_base.load()
# Step 3: Define the Agent using Sutra (OpenAI-compatible via OpenAILike)
agent = Agent(
    model=OpenAILike(
        id="sutra-v2",
        api_key=os.getenv("SUTRA_API_KEY"),
        base_url="https://api.two.ai/v2"
    ),
    knowledge=knowledge_base,
    search_knowledge=True,  # Enables agentic RAG behavior
    show_tool_calls=True,   # Show tool usage by agent
    markdown=True           # Output in markdown
)
# Step 4: Ask your question
agent.print_response(
    "How do I make chicken and galangal in coconut milk soup?", stream=True
)
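The same knowledge base can also be queried in other languages, since SUTRA handles multilingual input; the follow-up below is a sketch that asks the recipe question in French, reusing the agent defined above.
# Ask the same question in French
# ("How do I make chicken and galangal in coconut milk soup?")
agent.print_response(
    "Comment préparer la soupe de poulet au galanga et au lait de coco ?", stream=True
)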
🍿 PopcornPal - Movie Recommendation Agent 🎬
import os
from textwrap import dedent
from agno.agent import Agent
from agno.models.openai.like import OpenAILike
from agno.tools.tavily import TavilyTools
# ✅ Define the Agent
agent = Agent(
    name="PopcornPal",
    tools=[TavilyTools()],
    model=OpenAILike(
        id="sutra-v2",
        api_key=os.getenv("SUTRA_API_KEY"),
        base_url="https://api.two.ai/v2"
    ),
    description=dedent("""\
        You are PopcornPal, a passionate film expert helping users discover amazing movies. 🎬
        Suggest detailed, high-quality recommendations based on user tastes and movie ratings.
    """),
    instructions=dedent("""\
        Steps:
        1. Understand the user's taste.
        2. Use Tavily to search for recent movies with good ratings (IMDb > 7.5).
        3. Recommend 5+ movies with: Title, Year, Genre, Rating, Summary, Director, Cast.
        4. Format as a Markdown table. Add 🎬 or genre emojis. Mention streaming availability if known.
    """),
    markdown=True,
    show_tool_calls=True,
    add_datetime_to_instructions=True
)
# 🎯 Ask for recommendations
agent.print_response(
    "Suggest thriller movies with IMDb rating above 8. I enjoyed Parasite, The Dark Knight, and Inception.",
    stream=True
)
Latest News Summarizer
import os
from agno.agent import Agent
from agno.models.openai.like import OpenAILike
from agno.tools.duckduckgo import DuckDuckGoTools
agent = Agent(
    model=OpenAILike(
        id="sutra-v2",
        api_key=os.getenv("SUTRA_API_KEY"),
        base_url="https://api.two.ai/v2"
    ),
    description="You are an assistant that provides up-to-date news summaries.",
    tools=[DuckDuckGoTools()],
    show_tool_calls=True,
    markdown=True
)
agent.print_response("Summarize the latest developments in renewable energy.", stream=True)