From 2aa00827439fef84447640cc98837efb600ca122 Mon Sep 17 00:00:00 2001 From: Barabashka Date: Tue, 21 Apr 2026 12:02:31 +0300 Subject: [PATCH] =?UTF-8?q?=D0=98=D0=BD=D0=B8=D1=86=D0=B8=D0=B0=D0=BB?= =?UTF-8?q?=D0=B8=D0=B7=D0=B8=D1=80=D0=BE=D0=B2=D0=B0=D1=82=D1=8C=20=D0=B1?= =?UTF-8?q?=D0=B0=D0=B7=D0=BE=D0=B2=D1=8B=D0=B9=20=D0=BA=D0=B0=D1=80=D0=BA?= =?UTF-8?q?=D0=B0=D1=81=20MVP=20=D1=81=20=D1=87=D0=B0=D1=82-=D0=B0=D0=B3?= =?UTF-8?q?=D0=B5=D0=BD=D1=82=D0=BE=D0=BC=20=D0=BD=D0=B0=20Agno=20=D0=B8?= =?UTF-8?q?=20Ollama.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Добавить входную точку CLI, конфигурацию через .env и базовую документацию для быстрого локального запуска. --- .env.example | 6 +++++ .gitignore | 27 +++++++++++++++++++++++ README.md | 40 ++++++++++++++++++++++++++++++++++ requirements.txt | 5 +++++ src/__init__.py | 1 + src/agent_runner.py | 53 +++++++++++++++++++++++++++++++++++++++++++++ src/main.py | 43 ++++++++++++++++++++++++++++++++++++ 7 files changed, 175 insertions(+) create mode 100644 .env.example create mode 100644 .gitignore create mode 100644 README.md create mode 100644 requirements.txt create mode 100644 src/__init__.py create mode 100644 src/agent_runner.py create mode 100644 src/main.py diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..ac8600d --- /dev/null +++ b/.env.example @@ -0,0 +1,6 @@ +OLLAMA_MODEL_ID=gemma4:31b +OLLAMA_HOST=http://localhost:11435 +OLLAMA_TEMPERATURE=0 +AGENT_MARKDOWN=false +AGENT_DEBUG_MODE=true +AGENT_INSTRUCTIONS=You are a helpful assistant. Answer briefly and clearly. 
diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..d57556c --- /dev/null +++ b/.gitignore @@ -0,0 +1,27 @@ +# Python cache and bytecode +__pycache__/ +*.py[cod] +*$py.class + +# Virtual environments +.venv/ +venv/ + +# Environment files +.env + +# Build and packaging +build/ +dist/ +*.egg-info/ + +# Test and tool caches +.pytest_cache/ +.mypy_cache/ +.ruff_cache/ + +# IDE and OS files +.idea/ +.vscode/ +.DS_Store +.cursor diff --git a/README.md b/README.md new file mode 100644 index 0000000..15db035 --- /dev/null +++ b/README.md @@ -0,0 +1,40 @@ +# Prisma Platform MVP + +Minimal chat agent on Agno + Ollama. + +## Current structure + +```text +prisma_platform/ +├── .env +├── .env.example +├── requirements.txt +└── src/ + ├── __init__.py + ├── agent_runner.py + └── main.py +``` + +## Setup + +```bash +python -m venv .venv +source .venv/bin/activate +pip install -r requirements.txt +cp .env.example .env +``` + +## Run + +Interactive chat mode: + +```bash +python -m src.main +``` + +Single message mode: + +```bash +python -m src.main --message "Привет, что ты умеешь?" +``` + diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..0b6454e --- /dev/null +++ b/requirements.txt @@ -0,0 +1,5 @@ +agno +python-dotenv +ollama +socksio +openai diff --git a/src/__init__.py b/src/__init__.py new file mode 100644 index 0000000..2cb516d --- /dev/null +++ b/src/__init__.py @@ -0,0 +1 @@ +# Base package for agent entrypoint. 
# --- src/agent_runner.py ---------------------------------------------------
"""Build and run a process-wide singleton chat agent backed by an Ollama model.

Configuration is read from environment variables (typically loaded from a
`.env` file by the CLI entry point before this module is used):

    OLLAMA_MODEL_ID     model tag passed to Ollama (default "gemma4:31b")
    OLLAMA_HOST         Ollama server URL (default "http://localhost:11435")
    OLLAMA_TEMPERATURE  sampling temperature as a float (default 0.0)
    AGENT_MARKDOWN      bool flag: render agent output as markdown
    AGENT_DEBUG_MODE    bool flag: enable Agno debug mode
    AGENT_INSTRUCTIONS  system instructions for the agent
"""

import os

from agno.agent import Agent
from agno.models.ollama import Ollama


def _env_bool(name: str, default: bool) -> bool:
    """Read env var *name* as a boolean.

    An unset or blank variable falls back to *default* (a bare
    ``AGENT_DEBUG_MODE=`` line in a .env file previously forced ``False``
    regardless of the default). Truthy spellings: 1/true/yes/on,
    case-insensitive.
    """
    value = os.getenv(name)
    if value is None or not value.strip():
        return default
    return value.strip().lower() in {"1", "true", "yes", "on"}


def _env_float(name: str, default: float) -> float:
    """Read env var *name* as a float.

    An unset or blank variable falls back to *default* (previously a bare
    ``OLLAMA_TEMPERATURE=`` line crashed with ``ValueError`` on ``float("")``).
    A non-blank, non-numeric value still raises ValueError so typos are not
    silently ignored.
    """
    value = os.getenv(name)
    if value is None or not value.strip():
        return default
    return float(value)


# Lazily-created singleton; built on first get_agent() call so that
# load_dotenv() in the CLI runs before any env var is read.
_agent: Agent | None = None


def get_agent() -> Agent:
    """Return the singleton Agent, creating it from env config on first use."""
    global _agent

    if _agent is not None:
        return _agent

    # Defaults mirror .env.example.
    model_id = os.getenv("OLLAMA_MODEL_ID", "gemma4:31b")
    ollama_host = os.getenv("OLLAMA_HOST", "http://localhost:11435")
    temperature = _env_float("OLLAMA_TEMPERATURE", 0.0)
    markdown = _env_bool("AGENT_MARKDOWN", False)
    debug_mode = _env_bool("AGENT_DEBUG_MODE", True)
    instructions = os.getenv(
        "AGENT_INSTRUCTIONS",
        "You are a helpful assistant. Answer briefly and clearly.",
    )

    llm = Ollama(id=model_id, host=ollama_host, options={"temperature": temperature})
    _agent = Agent(
        model=llm,
        markdown=markdown,
        instructions=instructions,
        debug_mode=debug_mode,
    )
    return _agent


async def run_agent(message: str) -> str:
    """Send *message* to the singleton agent and return its reply as text."""
    agent = get_agent()
    response = await agent.arun(message)
    return str(response.content)


# --- src/main.py ------------------------------------------------------------
import argparse
import asyncio

from dotenv import load_dotenv

from src.agent_runner import run_agent


def build_parser() -> argparse.ArgumentParser:
    """Build the CLI argument parser for the chat agent entry point."""
    parser = argparse.ArgumentParser(
        description="Run base chat agent.",
    )
    parser.add_argument(
        "--message",
        help="Single message mode. If omitted, starts interactive chat.",
    )
    return parser


async def _main() -> None:
    """CLI entry point: one-shot mode with --message, otherwise a REPL loop."""
    load_dotenv()
    args = build_parser().parse_args()

    # `is not None` (not truthiness): an explicit --message "" should still
    # take the single-message path instead of silently starting the REPL.
    if args.message is not None:
        result = await run_agent(args.message)
        print(result)
        return

    print("Chat mode started. Type 'exit' or 'quit' to stop.")
    while True:
        try:
            user_message = input("you> ").strip()
        except (EOFError, KeyboardInterrupt):
            # Ctrl-D / exhausted piped stdin / Ctrl-C: exit cleanly instead
            # of dying with an unhandled exception.
            print("Bye.")
            break
        if not user_message:
            continue
        if user_message.lower() in {"exit", "quit"}:
            print("Bye.")
            break

        result = await run_agent(user_message)
        print(f"agent> {result}")


if __name__ == "__main__":
    asyncio.run(_main())