Fix linting

yangdx 2025-05-14 01:14:45 +08:00
parent 5c9fd9c4d2
commit 0e26cbebd0
2 changed files with 19 additions and 12 deletions


@@ -26,9 +26,7 @@ def configure_logging():
     # Get log directory path from environment variable or use current directory
     log_dir = os.getenv("LOG_DIR", os.getcwd())
-    log_file_path = os.path.abspath(
-        os.path.join(log_dir, "lightrag_ollama_demo.log")
-    )
+    log_file_path = os.path.abspath(os.path.join(log_dir, "lightrag_ollama_demo.log"))
     print(f"\nLightRAG compatible demo log file: {log_file_path}\n")
     os.makedirs(os.path.dirname(log_file_path), exist_ok=True)
@@ -211,6 +209,7 @@ async def main():
         await rag.llm_response_cache.index_done_callback()
         await rag.finalize_storages()
 if __name__ == "__main__":
     # Configure logging before running the main function
     configure_logging()
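
For orientation, the demo code these two hunks touch boils down to the sketch below, reconstructed only from the lines above: configure_logging() resolves the log path from the LOG_DIR environment variable, and the entry point configures logging before running main(), which flushes the LLM response cache and finalizes storages on shutdown. The main() body, the rag placeholder, and the asyncio.run() call are illustrative assumptions, not part of this diff.

import asyncio
import os


def configure_logging():
    # Resolve the log directory from LOG_DIR, falling back to the current directory
    log_dir = os.getenv("LOG_DIR", os.getcwd())
    log_file_path = os.path.abspath(os.path.join(log_dir, "lightrag_ollama_demo.log"))
    print(f"\nLightRAG compatible demo log file: {log_file_path}\n")
    # Ensure the directory exists before any file handler is attached
    os.makedirs(os.path.dirname(log_file_path), exist_ok=True)


async def main():
    rag = None  # assumption: the real demo builds a LightRAG instance here
    try:
        ...  # indexing and query calls omitted
    finally:
        if rag:
            # Flush the LLM response cache and close all storages before exit
            await rag.llm_response_cache.index_done_callback()
            await rag.finalize_storages()


if __name__ == "__main__":
    # Configure logging before running the main function
    configure_logging()
    asyncio.run(main())  # assumption: how the demo is launched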


@@ -62,9 +62,9 @@ async def _ollama_model_if_cache(
     }
     if api_key:
         headers["Authorization"] = f"Bearer {api_key}"
     ollama_client = ollama.AsyncClient(host=host, timeout=timeout, headers=headers)
     try:
         messages = []
         if system_prompt:
@@ -106,15 +106,21 @@ async def _ollama_model_if_cache(
             await ollama_client._client.aclose()
             logger.debug("Successfully closed Ollama client after exception")
         except Exception as close_error:
-            logger.warning(f"Failed to close Ollama client after exception: {close_error}")
+            logger.warning(
+                f"Failed to close Ollama client after exception: {close_error}"
+            )
         raise e
     finally:
         if not stream:
             try:
                 await ollama_client._client.aclose()
-                logger.debug("Successfully closed Ollama client for non-streaming response")
+                logger.debug(
+                    "Successfully closed Ollama client for non-streaming response"
+                )
             except Exception as close_error:
-                logger.warning(f"Failed to close Ollama client in finally block: {close_error}")
+                logger.warning(
+                    f"Failed to close Ollama client in finally block: {close_error}"
+                )
 async def ollama_model_complete(
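
The reflowed lines above do not change behavior; the underlying pattern is to create a per-call ollama.AsyncClient with optional Bearer-token headers and make sure its underlying httpx connection (ollama_client._client) is closed both when an exception escapes and, for non-streaming calls, in the finally block. A minimal self-contained sketch of that shape follows; the name chat_with_cleanup, the simplified signature, the stdlib logger, and the abbreviated headers dict are illustrative assumptions, not the repo's _ollama_model_if_cache.

import logging

import ollama

logger = logging.getLogger(__name__)  # assumption: the real module uses its own logger


async def chat_with_cleanup(
    model: str,
    messages: list[dict],
    host: str | None = None,
    timeout: int | None = None,
    api_key: str | None = None,
    stream: bool = False,
):
    headers = {"Content-Type": "application/json"}  # abbreviated headers
    if api_key:
        headers["Authorization"] = f"Bearer {api_key}"
    ollama_client = ollama.AsyncClient(host=host, timeout=timeout, headers=headers)
    try:
        return await ollama_client.chat(model=model, messages=messages, stream=stream)
    except Exception as e:
        # Close the underlying httpx client before re-raising so the connection is not leaked
        try:
            await ollama_client._client.aclose()
            logger.debug("Successfully closed Ollama client after exception")
        except Exception as close_error:
            logger.warning(
                f"Failed to close Ollama client after exception: {close_error}"
            )
        raise e
    finally:
        # Streaming callers still need the connection to iterate the response,
        # so only close eagerly when the full response was already received
        if not stream:
            try:
                await ollama_client._client.aclose()
                logger.debug(
                    "Successfully closed Ollama client for non-streaming response"
                )
            except Exception as close_error:
                logger.warning(
                    f"Failed to close Ollama client in finally block: {close_error}"
                )
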
@@ -141,12 +147,12 @@ async def ollama_embed(texts: list[str], embed_model, **kwargs) -> np.ndarray:
     }
     if api_key:
         headers["Authorization"] = f"Bearer {api_key}"
     host = kwargs.pop("host", None)
     timeout = kwargs.pop("timeout", None) or 90  # Default time out 90s
     ollama_client = ollama.AsyncClient(host=host, timeout=timeout, headers=headers)
     try:
         data = await ollama_client.embed(model=embed_model, input=texts)
         return np.array(data["embeddings"])
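
The embedding helper follows the same lifecycle: build headers, create a short-lived ollama.AsyncClient (timeout defaulted to 90 s), call embed(), and wrap the returned embeddings in a NumPy array; the hunk below only reflows its exception-handling log message. A trimmed sketch of that flow, where the name embed_texts and the unguarded close in finally are illustrative assumptions (the repo's ollama_embed also logs and wraps each close in try/except):

import numpy as np
import ollama


async def embed_texts(
    texts: list[str],
    embed_model: str,
    host: str | None = None,
    timeout: int | None = None,
    api_key: str | None = None,
) -> np.ndarray:
    headers = {"Content-Type": "application/json"}  # abbreviated headers
    if api_key:
        headers["Authorization"] = f"Bearer {api_key}"
    timeout = timeout or 90  # default timeout of 90s, as in the hunk above
    ollama_client = ollama.AsyncClient(host=host, timeout=timeout, headers=headers)
    try:
        data = await ollama_client.embed(model=embed_model, input=texts)
        return np.array(data["embeddings"])
    finally:
        # Always release the underlying httpx connection when the call finishes
        await ollama_client._client.aclose()
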
@@ -156,7 +162,9 @@ async def ollama_embed(texts: list[str], embed_model, **kwargs) -> np.ndarray:
             await ollama_client._client.aclose()
             logger.debug("Successfully closed Ollama client after exception in embed")
         except Exception as close_error:
-            logger.warning(f"Failed to close Ollama client after exception in embed: {close_error}")
+            logger.warning(
+                f"Failed to close Ollama client after exception in embed: {close_error}"
+            )
         raise e
     finally:
         try: