commit 2aa00827439fef84447640cc98837efb600ca122 Author: Barabashka Date: Tue Apr 21 12:02:31 2026 +0300 Инициализировать базовый каркас MVP с чат-агентом на Agno и Ollama. Добавить входную точку CLI, конфигурацию через .env и базовую документацию для быстрого локального запуска. diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..ac8600d --- /dev/null +++ b/.env.example @@ -0,0 +1,6 @@ +OLLAMA_MODEL_ID=gemma4:31b +OLLAMA_HOST=http://localhost:11435 +OLLAMA_TEMPERATURE=0 +AGENT_MARKDOWN=false +AGENT_DEBUG_MODE=true +AGENT_INSTRUCTIONS=You are a helpful assistant. Answer briefly and clearly. diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..d57556c --- /dev/null +++ b/.gitignore @@ -0,0 +1,27 @@ +# Python cache and bytecode +__pycache__/ +*.py[cod] +*$py.class + +# Virtual environments +.venv/ +venv/ + +# Environment files +.env + +# Build and packaging +build/ +dist/ +*.egg-info/ + +# Test and tool caches +.pytest_cache/ +.mypy_cache/ +.ruff_cache/ + +# IDE and OS files +.idea/ +.vscode/ +.DS_Store +.cursor diff --git a/README.md b/README.md new file mode 100644 index 0000000..15db035 --- /dev/null +++ b/README.md @@ -0,0 +1,40 @@ +# Prisma Platform MVP + +Minimal chat agent on Agno + Ollama. + +## Current structure + +```text +prisma_platform/ +├── .env +├── .env.example +├── requirements.txt +└── src/ + ├── __init__.py + ├── agent_runner.py + └── main.py +``` + +## Setup + +```bash +python -m venv .venv +source .venv/bin/activate +pip install -r requirements.txt +cp .env.example .env +``` + +## Run + +Interactive chat mode: + +```bash +python -m src.main +``` + +Single message mode: + +```bash +python -m src.main --message "Привет, что ты умеешь?" 
+```
+
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..0b6454e
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,5 @@
+agno
+python-dotenv
+ollama
+socksio
+openai
diff --git a/src/__init__.py b/src/__init__.py
new file mode 100644
index 0000000..2cb516d
--- /dev/null
+++ b/src/__init__.py
@@ -0,0 +1 @@
+# Base package for agent entrypoint.
diff --git a/src/agent_runner.py b/src/agent_runner.py
new file mode 100644
index 0000000..2366f20
--- /dev/null
+++ b/src/agent_runner.py
@@ -0,0 +1,66 @@
+import os
+
+from agno.agent import Agent
+from agno.models.ollama import Ollama
+
+
+def _env_bool(name: str, default: bool) -> bool:
+    """Read a boolean env var; an unset value falls back to *default*."""
+    value = os.getenv(name)
+    if value is None:
+        return default
+    return value.strip().lower() in {"1", "true", "yes", "on"}
+
+
+def _env_float(name: str, default: float) -> float:
+    """Read a float env var; unset or malformed values fall back to *default*."""
+    value = os.getenv(name)
+    if value is None:
+        return default
+    try:
+        return float(value)
+    except ValueError:
+        # Mirror _env_bool: a bad value in .env should not crash startup.
+        return default
+
+
+# Process-wide singleton; built lazily on first get_agent() call.
+_agent: Agent | None = None
+
+
+def get_agent() -> Agent:
+    """Return the process-wide Agent, creating it on first call.
+
+    Configuration is read from the environment once; later environment
+    changes do not affect the cached instance.
+    """
+    global _agent
+
+    if _agent is not None:
+        return _agent
+
+    model_id = os.getenv("OLLAMA_MODEL_ID", "gemma4:31b")
+    ollama_host = os.getenv("OLLAMA_HOST", "http://localhost:11435")
+    temperature = _env_float("OLLAMA_TEMPERATURE", 0.0)
+    markdown = _env_bool("AGENT_MARKDOWN", False)
+    debug_mode = _env_bool("AGENT_DEBUG_MODE", True)
+    instructions = os.getenv(
+        "AGENT_INSTRUCTIONS",
+        "You are a helpful assistant. Answer briefly and clearly.",
+    )
+
+    llm = Ollama(id=model_id, host=ollama_host, options={"temperature": temperature})
+    _agent = Agent(
+        model=llm,
+        markdown=markdown,
+        instructions=instructions,
+        debug_mode=debug_mode,
+    )
+    return _agent
+
+
+async def run_agent(message: str) -> str:
+    """Send *message* to the agent and return its text reply."""
+    agent = get_agent()
+    response = await agent.arun(message)
+    # content can be None for empty model replies; avoid returning "None".
+    return "" if response.content is None else str(response.content)
diff --git a/src/main.py b/src/main.py
new file mode 100644
index 0000000..109462a
--- /dev/null
+++ b/src/main.py
@@ -0,0 +1,50 @@
+import argparse
+import asyncio
+
+from dotenv import load_dotenv
+
+from src.agent_runner import run_agent
+
+
+def build_parser() -> argparse.ArgumentParser:
+    """Build the CLI argument parser for the chat entrypoint."""
+    parser = argparse.ArgumentParser(
+        description="Run base chat agent.",
+    )
+    parser.add_argument(
+        "--message",
+        help="Single message mode. If omitted, starts interactive chat.",
+    )
+    return parser
+
+
+async def _main() -> None:
+    """CLI entrypoint: one-shot mode with --message, else interactive chat."""
+    load_dotenv()
+    args = build_parser().parse_args()
+
+    if args.message:
+        result = await run_agent(args.message)
+        print(result)
+        return
+
+    print("Chat mode started. Type 'exit' or 'quit' to stop.")
+    while True:
+        try:
+            user_message = input("you> ").strip()
+        except (EOFError, KeyboardInterrupt):
+            # Ctrl-D / Ctrl-C should end the chat cleanly, not traceback.
+            print("\nBye.")
+            break
+        if not user_message:
+            continue
+        if user_message.lower() in {"exit", "quit"}:
+            print("Bye.")
+            break
+
+        result = await run_agent(user_message)
+        print(f"agent> {result}")
+
+
+if __name__ == "__main__":
+    asyncio.run(_main())