diff --git a/catch-all/06_bots_telegram/09_ollama_bot/bot/func/interactions.py b/catch-all/06_bots_telegram/09_ollama_bot/bot/func/interactions.py
index d157e9c..3433849 100644
--- a/catch-all/06_bots_telegram/09_ollama_bot/bot/func/interactions.py
+++ b/catch-all/06_bots_telegram/09_ollama_bot/bot/func/interactions.py
@@ -5,10 +5,11 @@
 import aiohttp
 import json
 from aiogram import types
-from aiohttp import ClientTimeout
+from aiohttp import ClientTimeout, ClientResponseError, RequestInfo
 from asyncio import Lock
 from functools import wraps
 from dotenv import load_dotenv
+from yarl import URL
 
 load_dotenv('.env')
 
@@ -72,11 +73,25 @@
         if response.status != 200:
-            raise aiohttp.ClientResponseError(
-
-                status=response.status, message=response.reason
-
+            request_info = RequestInfo(
+                url=URL(url),
+                method='POST',
+                headers=response.request_info.headers,
+                real_url=response.request_info.real_url,
             )
+            raise ClientResponseError(
+                request_info=request_info,
+                history=tuple(),
+                status=response.status,
+                message=response.reason,
+                headers=response.headers
+            )
+
+            # raise aiohttp.ClientResponseError(
+
+            #     status=response.status, message=response.reason
+
+            # )
 
         buffer = b""
diff --git a/catch-all/06_bots_telegram/09_ollama_bot/docker-compose.yml b/catch-all/06_bots_telegram/09_ollama_bot/docker-compose.yml
index 461f6e3..22999fd 100644
--- a/catch-all/06_bots_telegram/09_ollama_bot/docker-compose.yml
+++ b/catch-all/06_bots_telegram/09_ollama_bot/docker-compose.yml
@@ -1,19 +1,25 @@
-# WORK IN PROGRESS
-version: '3.8'
+
 services:
+
   ollama-tg:
     build: .
     container_name: ollama-tg
     restart: on-failure
     env_file:
       - ./.env
-
+    networks:
+      - ollama-net
+    depends_on:
+      - ollama-api
+
   ollama-api:
     image: ollama/ollama:latest
     container_name: ollama-server
     volumes:
       - ./ollama:/root/.ollama
-
+    networks:
+      - ollama-net
+
     # Uncomment to enable the NVIDIA GPU
     # Otherwise, it runs on the CPU only:
 
@@ -28,3 +34,9 @@
     restart: always
     ports:
       - '11434:11434'
+    environment:
+      - OLLAMA_MODELS=/ollama/models
+
+networks:
+  ollama-net:
+    driver: bridge
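
Note on the new error path (not part of the patch): aiohttp already attaches the metadata of the request that produced a response to the response object, so an equivalent version could reuse response.request_info directly instead of rebuilding a RequestInfo from the URL. A minimal sketch of that variant, assuming the enclosing async with ... as response: block inside generate():

    # Sketch only, not the code in this diff. Assumes `response` is the
    # aiohttp.ClientResponse obtained inside generate().
    if response.status != 200:
        raise ClientResponseError(
            request_info=response.request_info,  # already a RequestInfo for this call
            history=response.history,            # redirect chain; empty tuple if none
            status=response.status,
            message=response.reason,
            headers=response.headers,
        )

For plain 4xx/5xx handling, response.raise_for_status() raises a ClientResponseError populated with the same fields, although it does not cover non-error statuses the way the explicit != 200 check does.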
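
Side note on the compose change: with both services attached to ollama-net, the bot container can reach the Ollama server by its compose service name instead of localhost. The hostname below follows the ollama-api service name from docker-compose.yml; the OLLAMA_BASE_URL variable and check_ollama helper are illustrative assumptions, not values taken from this repository. A small connectivity check, runnable from inside the ollama-tg container:

    # Hypothetical connectivity check. "ollama-api" is the compose service name
    # on the ollama-net bridge network; /api/tags lists locally available models.
    import asyncio
    import aiohttp

    OLLAMA_BASE_URL = "http://ollama-api:11434"

    async def check_ollama() -> bool:
        timeout = aiohttp.ClientTimeout(total=5)
        async with aiohttp.ClientSession(timeout=timeout) as session:
            try:
                async with session.get(f"{OLLAMA_BASE_URL}/api/tags") as resp:
                    return resp.status == 200
            except aiohttp.ClientError:
                return False

    if __name__ == "__main__":
        print(asyncio.run(check_ollama()))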