Compare commits

..

3 Commits

Author SHA1 Message Date
e1bbc92162 added fastapi 2026-01-23 10:34:50 +03:00
13c9b97073 Merge branch 'master' of https://git.kurbet.ru/ziabric/hestia 2026-01-23 09:32:26 +03:00
91cdb1328b init 2026-01-23 09:27:23 +03:00
6 changed files with 151 additions and 1 deletion

2
.gitignore vendored
View File

@@ -173,4 +173,4 @@ cython_debug/
# PyPI configuration file
.pypirc
# JetBrains IDE project settings (PyCharm/IntelliJ)
.idea/**

105
main.py Normal file
View File

@@ -0,0 +1,105 @@
import asyncio
import json
import urllib.request
from contextlib import asynccontextmanager
from typing import Optional
from fastapi import FastAPI
from pydantic import BaseModel
from mcp_agent.app import MCPApp
from mcp_agent.agents.agent import Agent
from mcp_agent.workflows.llm.augmented_llm_openai import OpenAIAugmentedLLM
# Model served by the local Ollama instance (must be pulled beforehand).
OLLAMA_MODEL = "qwen3:1.7b"
# How long Ollama keeps the model resident after the warm-up request.
OLLAMA_KEEP_ALIVE = "10h"
# System prompt for the weather agent.  Adjacent string literals are
# concatenated verbatim, so each piece ends with an explicit space —
# the original ran the sentences together with no separator.
AGENT_INSTRUCTION = (
    "Ты ассистент по погоде. "
    "Если нужны актуальные данные — используй доступные инструменты. "
    "ОТВЕЧАЙ ТОЛЬКО НА РУССКОМ ЯЗЫКЕ! "
    "Не выводи блоки <think> или внутренние рассуждения, отвечай только итогом."
)
# MCP servers the agent may call (keys under mcp.servers in mcp_agent.config.yaml).
MCP_SERVER_NAMES = ["weather"]
def warmup_ollama(
    model: str,
    keep_alive: str = "10h",
    base_url: str = "http://localhost:11434",
) -> None:
    """Send a best-effort warm-up request so Ollama pre-loads *model*.

    An empty-prompt /api/generate call makes Ollama load the model and
    keep it resident for *keep_alive*.  Failures are swallowed: if the
    Ollama daemon is not running yet, the service should still start.

    Args:
        model: Ollama model tag to pre-load.
        keep_alive: Ollama keep-alive duration string (e.g. "10h").
        base_url: Root URL of the Ollama HTTP API.
    """
    payload = {
        "model": model,
        "prompt": "",
        "stream": False,
        "keep_alive": keep_alive,
    }
    req = urllib.request.Request(
        f"{base_url}/api/generate",
        data=json.dumps(payload).encode("utf-8"),
        headers={"Content-Type": "application/json"},
        method="POST",
    )
    try:
        # Context manager ensures the HTTP response is always closed.
        with urllib.request.urlopen(req, timeout=5) as resp:
            resp.read()
    except OSError:
        # Covers URLError/HTTPError/timeout — warm-up is best-effort only.
        pass
class AskRequest(BaseModel):
    """Request body for POST /ask."""

    # Free-form user question forwarded to the agent.
    question: str
class AskResponse(BaseModel):
    """Response body for POST /ask."""

    # Agent's answer text (or a fallback message when the service is not ready).
    answer: str
# Long-lived application state, initialised once by lifespan() at startup.
app_mcp = MCPApp(name="weather_agent")
_agent: Optional[Agent] = None  # MCP agent, created during startup
_llm: Optional[OpenAIAugmentedLLM] = None  # LLM handle attached to the agent
# Serialises access to the single shared LLM across concurrent requests.
_lock: asyncio.Lock = asyncio.Lock()
@asynccontextmanager
async def lifespan(app: FastAPI):
    """
    FastAPI lifespan manager.

    Startup:
        - warm up Ollama so the model is resident before the first request
        - start the MCPApp
        - create the Agent and attach the LLM exactly once
    Shutdown:
        - the agent and MCPApp are closed by their context managers;
          the module-level handles are reset so /ask answers "not ready"
          instead of calling into a closed client.
    """
    global _agent, _llm
    warmup_ollama(OLLAMA_MODEL, keep_alive=OLLAMA_KEEP_ALIVE)
    async with app_mcp.run():
        _agent = Agent(
            name="weather",
            instruction=AGENT_INSTRUCTION,
            server_names=MCP_SERVER_NAMES,
        )
        async with _agent:
            _llm = await _agent.attach_llm(OpenAIAugmentedLLM)
            try:
                yield
            finally:
                # Original left stale handles behind after shutdown;
                # clear them so no caller can use a closed agent/LLM.
                _llm = None
                _agent = None
# FastAPI application; startup/shutdown is wired through lifespan().
api = FastAPI(title="Weather MCP Agent API", lifespan=lifespan)
@api.post("/ask", response_model=AskResponse)
async def ask(req: AskRequest) -> AskResponse:
    """
    Answer a user question via the MCP-backed LLM agent.

    Returns a "service not ready" fallback while startup has not
    completed (or has failed), instead of raising an error.
    """
    if _llm is None:
        return AskResponse(answer="Сервис не готов. Попробуйте позже.")
    # All requests share one LLM handle, so calls are serialised.
    async with _lock:
        answer = await _llm.generate_str(req.question)
    return AskResponse(answer=answer)
@api.get("/health")
async def health():
    """Liveness probe: always reports the service as healthy."""
    payload = dict(status="ok")
    return payload

11
mcp_agent.config.yaml Normal file
View File

@@ -0,0 +1,11 @@
execution_engine: asyncio

mcp:
  servers:
    # Server name must match MCP_SERVER_NAMES in main.py.
    weather:
      command: "python"
      args: ["weather_mcp.py"]

openai:
  # Ollama's OpenAI-compatible endpoint.
  base_url: "http://localhost:11434/v1"
  # NOTE(review): main.py warms up "qwen3:1.7b", but the default model
  # here is "llama3.2:1b" — the warmed model and the model actually used
  # differ; confirm which one is intended.
  default_model: "llama3.2:1b"

2
mcp_agent.secrets.yaml Normal file
View File

@@ -0,0 +1,2 @@
openai:
  # Placeholder credential for the OpenAI-compatible client pointed at
  # Ollama; presumably Ollama ignores the key's value — TODO confirm.
  api_key: "ollama"

3
requirements.txt Normal file
View File

@@ -0,0 +1,3 @@
# Runtime dependencies for the weather MCP agent service.
# fastapi is imported directly by main.py but was missing here.
fastapi
mcp
mcp-agent[openai]
pydantic

29
weather_mcp.py Normal file
View File

@@ -0,0 +1,29 @@
from mcp.server.fastmcp import FastMCP
from pydantic import BaseModel
class WeatherResult(BaseModel):
    """Structured weather answer returned by the get_weather tool."""

    # City the answer refers to (echoed back from the request).
    city: str
    # Temperature value; its unit system is given by `units`.
    temperature: int
    # Unit system requested by the caller, e.g. "metric".
    units: str
    # Short human-readable sky condition.
    condition: str
# MCP server exposing the weather tool; the name "weather" is the key
# the agent uses to reach this server.
server = FastMCP(name="weather")
@server.tool(
    name="get_weather",
    description="Возвращает актуальную погоду по городу",
)
async def get_weather(city: str, units: str = "metric") -> WeatherResult:
    """Return the weather for *city*.

    NOTE(review): this is a stub — it always returns 18 degrees and
    "partly cloudy" regardless of the city; wire it to a real weather
    API before relying on the data.
    """
    return WeatherResult(
        city=city,
        temperature=18,
        units=units,
        condition="partly cloudy",
    )
# Entry point: run the MCP server when launched directly
# (mcp_agent.config.yaml starts it as `python weather_mcp.py`).
if __name__ == "__main__":
    server.run()