diff --git a/lightrag/llm/ollama.py b/lightrag/llm/ollama.py
index 724716a9..5cd8a721 100644
--- a/lightrag/llm/ollama.py
+++ b/lightrag/llm/ollama.py
@@ -50,7 +50,7 @@ async def _ollama_model_if_cache(
     kwargs.pop("max_tokens", None)
     # kwargs.pop("response_format", None) # allow json
     host = kwargs.pop("host", None)
-    timeout = kwargs.pop("timeout", None) or 600 # Default timeout 600s (10분으로 증가)
+    timeout = kwargs.pop("timeout", None) or 600 # Default timeout 600s
     kwargs.pop("hashing_kv", None)
    api_key = kwargs.pop("api_key", None)
     headers = {
@@ -146,7 +146,7 @@ async def ollama_embed(texts: list[str], embed_model, **kwargs) -> np.ndarray:
         headers["Authorization"] = f"Bearer {api_key}"

     host = kwargs.pop("host", None)
-    timeout = kwargs.pop("timeout", None) or 300 # Default time out 300s (5분으로 증가)
+    timeout = kwargs.pop("timeout", None) or 300 # Default time out 300s

     ollama_client = ollama.AsyncClient(host=host, timeout=timeout, headers=headers)

@@ -168,4 +168,4 @@ async def ollama_embed(texts: list[str], embed_model, **kwargs) -> np.ndarray:
         await ollama_client._client.aclose()
         logger.debug("Successfully closed Ollama client after embed")
     except Exception as close_error:
-        logger.warning(f"Failed to close Ollama client after embed: {close_error}")
\ No newline at end of file
+        logger.warning(f"Failed to close Ollama client after embed: {close_error}")