from agno.agent import Agent, RunOutput  # noqa
from agno.models.ollama import Ollama

# Run a locally served Ollama model (requires `ollama pull llama3.1:8b`
# and a running Ollama server on the default host/port).
agent = Agent(model=Ollama(id="llama3.1:8b"), markdown=True)

# Get the response in a variable
# run: RunOutput = agent.run("Share a 2 sentence horror story")
# print(run.content)

# Print the response in the terminal
agent.print_response("Share a 2 sentence horror story")
For easier setup without local installation, you can use Ollama Cloud with your API key:
Copy
Ask AI
from agno.agent import Agent
from agno.models.ollama import Ollama

# No local setup required - just set OLLAMA_API_KEY
# The `host` points the client at Ollama's hosted endpoint instead of a
# local server; the `-cloud` model tag selects a cloud-served model.
agent = Agent(model=Ollama(id="gpt-oss:120b-cloud", host="https://ollama.com"))
agent.print_response("Share a 2 sentence horror story")