From a44f1f735d3750f15aa2f77cb7aa5d4c16aa95bf Mon Sep 17 00:00:00 2001
From: Kevin Hu
Date: Fri, 20 Sep 2024 10:41:25 +0800
Subject: [PATCH] fix self deployed llm lost (#2510)

### What problem does this PR solve?

#2509

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
---
 api/apps/llm_app.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/api/apps/llm_app.py b/api/apps/llm_app.py
index 0a72b3f64..6f26fdcf3 100644
--- a/api/apps/llm_app.py
+++ b/api/apps/llm_app.py
@@ -305,6 +305,7 @@ def my_llms():
 @manager.route('/list', methods=['GET'])
 @login_required
 def list_app():
+    self_deploied = ["Youdao","FastEmbed", "BAAI", "Ollama", "Xinference", "LocalAI", "LM-Studio"]
     model_type = request.args.get("model_type")
     try:
         objs = TenantLLMService.query(tenant_id=current_user.id)
@@ -313,7 +314,7 @@ def list_app():
         llms = [m.to_dict() for m in llms if m.status == StatusEnum.VALID.value]
 
         for m in llms:
-            m["available"] = m["fid"] in facts or m["llm_name"].lower() == "flag-embedding" or m["fid"] in ["Youdao","FastEmbed", "BAAI"]
+            m["available"] = m["fid"] in facts or m["llm_name"].lower() == "flag-embedding" or m["fid"] in self_deploied
         llm_set = set([m["llm_name"] for m in llms])
         for o in objs:
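
For context, here is a minimal standalone sketch of the availability check this patch touches. It is not the RAGFlow code itself: `mark_available`, the sample model dicts, and the `facts` set are hypothetical stand-ins for what `list_app()` builds from the tenant's configured factories. It only illustrates why widening the self-deployed factory list keeps locally hosted models (Ollama, Xinference, LocalAI, LM-Studio) from being flagged unavailable.

```python
# Hedged sketch of the availability rule changed by this patch.
# The list mirrors the factories the patch whitelists as self-deployed.
SELF_DEPLOYED = ["Youdao", "FastEmbed", "BAAI", "Ollama",
                 "Xinference", "LocalAI", "LM-Studio"]


def mark_available(llms, facts, self_deployed=SELF_DEPLOYED):
    """Flag each model dict as available when its factory has an API key
    configured (`facts`), it is the built-in flag-embedding model, or it
    comes from a self-deployed factory (the list this patch extends)."""
    for m in llms:
        m["available"] = (
            m["fid"] in facts
            or m["llm_name"].lower() == "flag-embedding"
            or m["fid"] in self_deployed
        )
    return llms


if __name__ == "__main__":
    models = [
        {"fid": "Ollama", "llm_name": "llama3:8b"},   # self-deployed, no API key
        {"fid": "OpenAI", "llm_name": "gpt-4o"},      # configured via API key
    ]
    # Before the patch only Youdao/FastEmbed/BAAI were whitelisted, so the
    # Ollama entry would be marked unavailable; with the extended list it
    # stays available even though "Ollama" is not in `facts`.
    print(mark_available(models, facts={"OpenAI"}))
```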