feat: add new LLM provider Jiekou.AI (#11300)

### What problem does this PR solve?

Adds Jiekou.AI as a new LLM provider. Jiekou.AI exposes an OpenAI-compatible API, so this PR registers it as a LiteLLM-backed chat factory (base URL `https://api.jiekou.ai/openai`, `openai/` model prefix), adds dedicated embedding and rerank model classes, declares the supported chat/embedding/rerank models in the factory configuration, and wires the provider icon, enum entry, and display order into the web UI. The supported-models documentation and release notes are updated accordingly.
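
For quick manual verification, here is a minimal chat smoke test against the provider's OpenAI-compatible endpoint. This is a sketch only: the trailing `/v1` path and the availability of the example model are assumptions layered on top of the base URL this PR registers.

```python
# Minimal smoke test; assumes the endpoint is OpenAI-compatible as configured in this PR.
from openai import OpenAI

client = OpenAI(
    api_key="YOUR_JIEKOU_AI_KEY",                # placeholder key
    base_url="https://api.jiekou.ai/openai/v1",  # assumed path; the PR registers https://api.jiekou.ai/openai
)
resp = client.chat.completions.create(
    model="deepseek/deepseek-v3.1",              # one of the chat models declared in this PR
    messages=[{"role": "user", "content": "ping"}],
)
print(resp.choices[0].message.content)
```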

### Type of change

- [x] New Feature (non-breaking change which adds functionality)

Co-authored-by: Jason <ggbbddjm@gmail.com>
Author: cnJasonZ
Date: 2025-11-17 19:47:46 +08:00 (committed by GitHub)
Commit: 3fcf2ee54c (parent: d8f413a885)
10 changed files with 663 additions and 1 deletion


@@ -4839,6 +4839,639 @@
"is_tools": false
}
]
},
{
"name": "Jiekou.AI",
"logo": "",
"tags": "LLM,TEXT EMBEDDING,TEXT RE-RANK",
"status": "1",
"llm": [
{
"llm_name": "Sao10K/L3-8B-Stheno-v3.2",
"tags": "LLM,CHAT,8K",
"max_tokens": 8192,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "baichuan/baichuan-m2-32b",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "baidu/ernie-4.5-300b-a47b-paddle",
"tags": "LLM,CHAT,123K",
"max_tokens": 123000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "baidu/ernie-4.5-vl-424b-a47b",
"tags": "LLM,CHAT,123K",
"max_tokens": 123000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "claude-3-5-haiku-20241022",
"tags": "LLM,CHAT,200K",
"max_tokens": 200000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "claude-3-5-sonnet-20241022",
"tags": "LLM,CHAT,200K",
"max_tokens": 200000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "claude-3-7-sonnet-20250219",
"tags": "LLM,CHAT,200K",
"max_tokens": 200000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "claude-3-haiku-20240307",
"tags": "LLM,CHAT,200K",
"max_tokens": 200000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "claude-haiku-4-5-20251001",
"tags": "LLM,CHAT,20K,IMAGE2TEXT",
"max_tokens": 20000,
"model_type": "image2text",
"is_tools": true
},
{
"llm_name": "claude-opus-4-1-20250805",
"tags": "LLM,CHAT,200K",
"max_tokens": 200000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "claude-opus-4-20250514",
"tags": "LLM,CHAT,200K",
"max_tokens": 200000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "claude-sonnet-4-20250514",
"tags": "LLM,CHAT,200K",
"max_tokens": 200000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "claude-sonnet-4-5-20250929",
"tags": "LLM,CHAT,200K,IMAGE2TEXT",
"max_tokens": 200000,
"model_type": "image2text",
"is_tools": true
},
{
"llm_name": "deepseek/deepseek-r1-0528",
"tags": "LLM,CHAT,163K",
"max_tokens": 163840,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "deepseek/deepseek-v3-0324",
"tags": "LLM,CHAT,163K",
"max_tokens": 163840,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "deepseek/deepseek-v3.1",
"tags": "LLM,CHAT,163K",
"max_tokens": 163840,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "doubao-1-5-pro-32k-250115",
"tags": "LLM,CHAT,128K",
"max_tokens": 128000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "doubao-1.5-pro-32k-character-250715",
"tags": "LLM,CHAT,200K",
"max_tokens": 200000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gemini-2.0-flash-20250609",
"tags": "LLM,CHAT,1M",
"max_tokens": 1048576,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gemini-2.0-flash-lite",
"tags": "LLM,CHAT,1M",
"max_tokens": 1048576,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gemini-2.5-flash",
"tags": "LLM,CHAT,1M",
"max_tokens": 1048576,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gemini-2.5-flash-lite",
"tags": "LLM,CHAT,1M",
"max_tokens": 1048576,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gemini-2.5-flash-lite-preview-06-17",
"tags": "LLM,CHAT,1M",
"max_tokens": 1048576,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gemini-2.5-flash-lite-preview-09-2025",
"tags": "LLM,CHAT,1M,IMAGE2TEXT",
"max_tokens": 1048576,
"model_type": "image2text",
"is_tools": true
},
{
"llm_name": "gemini-2.5-flash-preview-05-20",
"tags": "LLM,CHAT,1M",
"max_tokens": 1048576,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gemini-2.5-pro",
"tags": "LLM,CHAT,1M",
"max_tokens": 1048576,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gemini-2.5-pro-preview-06-05",
"tags": "LLM,CHAT,1M",
"max_tokens": 1048576,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "google/gemma-3-12b-it",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "google/gemma-3-27b-it",
"tags": "LLM,CHAT,32K",
"max_tokens": 32768,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gpt-4.1",
"tags": "LLM,CHAT,1M",
"max_tokens": 1047576,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gpt-4.1-mini",
"tags": "LLM,CHAT,1M",
"max_tokens": 1047576,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gpt-4.1-nano",
"tags": "LLM,CHAT,1M",
"max_tokens": 1047576,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gpt-4o",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gpt-4o-mini",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gpt-5",
"tags": "LLM,CHAT,400K",
"max_tokens": 400000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gpt-5-chat-latest",
"tags": "LLM,CHAT,400K",
"max_tokens": 400000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gpt-5-codex",
"tags": "LLM,CHAT,400K,IMAGE2TEXT",
"max_tokens": 400000,
"model_type": "image2text",
"is_tools": true
},
{
"llm_name": "gpt-5-mini",
"tags": "LLM,CHAT,400K",
"max_tokens": 400000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gpt-5-nano",
"tags": "LLM,CHAT,400K",
"max_tokens": 400000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gpt-5-pro",
"tags": "LLM,CHAT,400K,IMAGE2TEXT",
"max_tokens": 400000,
"model_type": "image2text",
"is_tools": true
},
{
"llm_name": "gpt-5.1",
"tags": "LLM,CHAT,400K",
"max_tokens": 400000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gpt-5.1-chat-latest",
"tags": "LLM,CHAT,128K",
"max_tokens": 128000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gpt-5.1-codex",
"tags": "LLM,CHAT,400K",
"max_tokens": 400000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "grok-3",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "grok-3-mini",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "grok-4-0709",
"tags": "LLM,CHAT,256K",
"max_tokens": 256000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "grok-4-fast-non-reasoning",
"tags": "LLM,CHAT,2M,IMAGE2TEXT",
"max_tokens": 2000000,
"model_type": "image2text",
"is_tools": true
},
{
"llm_name": "grok-4-fast-reasoning",
"tags": "LLM,CHAT,2M,IMAGE2TEXT",
"max_tokens": 2000000,
"model_type": "image2text",
"is_tools": true
},
{
"llm_name": "grok-code-fast-1",
"tags": "LLM,CHAT,256K",
"max_tokens": 256000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "gryphe/mythomax-l2-13b",
"tags": "LLM,CHAT,4K",
"max_tokens": 4096,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "meta-llama/llama-3.1-8b-instruct",
"tags": "LLM,CHAT,16K",
"max_tokens": 16384,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "meta-llama/llama-3.2-3b-instruct",
"tags": "LLM,CHAT,32K",
"max_tokens": 32768,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "meta-llama/llama-3.3-70b-instruct",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "meta-llama/llama-4-maverick-17b-128e-instruct-fp8",
"tags": "LLM,CHAT,1M",
"max_tokens": 1048576,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "meta-llama/llama-4-scout-17b-16e-instruct",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "minimaxai/minimax-m1-80k",
"tags": "LLM,CHAT,1M",
"max_tokens": 1000000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "mistralai/mistral-7b-instruct",
"tags": "LLM,CHAT,32K",
"max_tokens": 32768,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "mistralai/mistral-nemo",
"tags": "LLM,CHAT,60K",
"max_tokens": 60288,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "moonshotai/kimi-k2-0905",
"tags": "LLM,CHAT,262K",
"max_tokens": 262144,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "moonshotai/kimi-k2-instruct",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "o1",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "o1-mini",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "o3",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "o3-mini",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "openai/gpt-oss-120b",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "openai/gpt-oss-20b",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "qwen/qwen-2.5-72b-instruct",
"tags": "LLM,CHAT,32K",
"max_tokens": 32000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "qwen/qwen-mt-plus",
"tags": "LLM,CHAT,4K",
"max_tokens": 4096,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "qwen/qwen2.5-7b-instruct",
"tags": "LLM,CHAT,32K",
"max_tokens": 32000,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "qwen/qwen2.5-vl-72b-instruct",
"tags": "LLM,CHAT,32K",
"max_tokens": 32768,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "qwen/qwen3-235b-a22b-fp8",
"tags": "LLM,CHAT,40K",
"max_tokens": 40960,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "qwen/qwen3-235b-a22b-instruct-2507",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "qwen/qwen3-235b-a22b-thinking-2507",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "qwen/qwen3-30b-a3b-fp8",
"tags": "LLM,CHAT,40K",
"max_tokens": 40960,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "qwen/qwen3-32b-fp8",
"tags": "LLM,CHAT,40K",
"max_tokens": 40960,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "qwen/qwen3-8b-fp8",
"tags": "LLM,CHAT,128K",
"max_tokens": 128000,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "qwen/qwen3-coder-480b-a35b-instruct",
"tags": "LLM,CHAT,262K",
"max_tokens": 262144,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "qwen/qwen3-next-80b-a3b-instruct",
"tags": "LLM,CHAT,65K",
"max_tokens": 65536,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "qwen/qwen3-next-80b-a3b-thinking",
"tags": "LLM,CHAT,65K",
"max_tokens": 65536,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "sao10k/l3-70b-euryale-v2.1",
"tags": "LLM,CHAT,8K",
"max_tokens": 8192,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "sao10k/l3-8b-lunaris",
"tags": "LLM,CHAT,8K",
"max_tokens": 8192,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "sao10k/l31-70b-euryale-v2.2",
"tags": "LLM,CHAT,8K",
"max_tokens": 8192,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "thudm/glm-4.1v-9b-thinking",
"tags": "LLM,CHAT,65K",
"max_tokens": 65536,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "zai-org/glm-4.5",
"tags": "LLM,CHAT,131K",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "zai-org/glm-4.5v",
"tags": "LLM,CHAT,65K",
"max_tokens": 65536,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "baai/bge-m3",
"tags": "TEXT EMBEDDING,8K",
"max_tokens": 8192,
"model_type": "embedding"
},
{
"llm_name": "qwen/qwen3-embedding-0.6b",
"tags": "TEXT EMBEDDING,32K",
"max_tokens": 32768,
"model_type": "embedding"
},
{
"llm_name": "qwen/qwen3-embedding-8b",
"tags": "TEXT EMBEDDING,32K",
"max_tokens": 32768,
"model_type": "embedding"
},
{
"llm_name": "baai/bge-reranker-v2-m3",
"tags": "RE-RANK,8K",
"max_tokens": 8000,
"model_type": "reranker"
},
{
"llm_name": "qwen/qwen3-reranker-8b",
"tags": "RE-RANK,32K",
"max_tokens": 32768,
"model_type": "reranker"
}
]
}
]
}
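
For reviewers, a small sketch for sanity-checking the new factory entry; the `conf/llm_factories.json` path and the `factory_llm_infos` top-level key are assumptions about where this block lives.

```python
# Quick consistency check of the new entry; file path and top-level key are assumptions.
import json

with open("conf/llm_factories.json", encoding="utf-8") as fh:
    factories = json.load(fh)["factory_llm_infos"]

jiekou = next(fac for fac in factories if fac["name"] == "Jiekou.AI")
counts: dict[str, int] = {}
for model in jiekou["llm"]:
    counts[model["model_type"]] = counts.get(model["model_type"], 0) + 1
print(counts)  # expect chat, image2text, embedding, and reranker entries
```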


@@ -67,6 +67,7 @@ A complete list of models supported by RAGFlow, which will continue to expand.
| 302.AI | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | | |
| CometAPI | :heavy_check_mark: | :heavy_check_mark: | | | | |
| DeerAPI | :heavy_check_mark: | :heavy_check_mark: | | :heavy_check_mark: | | :heavy_check_mark: |
| Jiekou.AI | :heavy_check_mark: | :heavy_check_mark: | :heavy_check_mark: | | | |
```mdx-code-block
</APITable>


@@ -693,7 +693,7 @@ Released on August 26, 2024.
- Incorporates monitoring for the task executor.
- Introduces Agent tools **GitHub**, **DeepL**, **BaiduFanyi**, **QWeather**, and **GoogleScholar**.
- Supports chunking of EML files.
- Supports more LLMs or model services: **GPT-4o-mini**, **PerfXCloud**, **TogetherAI**, **Upstage**, **Novita AI**, **01.AI**, **SiliconFlow**, **PPIO**, **XunFei Spark**, **Baidu Yiyan**, and **Tencent Hunyuan**.
- Supports more LLMs or model services: **GPT-4o-mini**, **PerfXCloud**, **TogetherAI**, **Upstage**, **Novita AI**, **01.AI**, **SiliconFlow**, **PPIO**, **XunFei Spark**, **Jiekou.AI**, **Baidu Yiyan**, and **Tencent Hunyuan**.
## v0.9.0


@@ -49,6 +49,7 @@ class SupportedLiteLLMProvider(StrEnum):
    Lingyi_AI = "01.AI"
    GiteeAI = "GiteeAI"
    AI_302 = "302.AI"
    JiekouAI = "Jiekou.AI"

FACTORY_DEFAULT_BASE_URL = {
@@ -69,6 +70,7 @@ FACTORY_DEFAULT_BASE_URL = {
    SupportedLiteLLMProvider.GiteeAI: "https://ai.gitee.com/v1/",
    SupportedLiteLLMProvider.AI_302: "https://api.302.ai/v1",
    SupportedLiteLLMProvider.Anthropic: "https://api.anthropic.com/",
    SupportedLiteLLMProvider.JiekouAI: "https://api.jiekou.ai/openai",
}
@@ -99,6 +101,7 @@ LITELLM_PROVIDER_PREFIX = {
    SupportedLiteLLMProvider.Lingyi_AI: "openai/",
    SupportedLiteLLMProvider.GiteeAI: "openai/",
    SupportedLiteLLMProvider.AI_302: "openai/",
    SupportedLiteLLMProvider.JiekouAI: "openai/",
}

ChatModel = globals().get("ChatModel", {})
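
A minimal sketch of how the registry entries above come together when a Jiekou.AI chat model is dispatched through LiteLLM. This is illustrative only and not RAGFlow's actual call path; the helper and its arguments are assumptions.

```python
# Minimal sketch, assuming the litellm package is installed; not RAGFlow's dispatch code.
import litellm

BASE_URL = "https://api.jiekou.ai/openai"  # FACTORY_DEFAULT_BASE_URL entry added above
PREFIX = "openai/"                         # LITELLM_PROVIDER_PREFIX entry added above


def chat_once(api_key: str, model_name: str, prompt: str) -> str:
    # The "openai/" prefix tells LiteLLM to treat Jiekou.AI as an OpenAI-compatible
    # provider and to send the request to the custom api_base.
    response = litellm.completion(
        model=f"{PREFIX}{model_name}",
        messages=[{"role": "user", "content": prompt}],
        api_key=api_key,
        api_base=BASE_URL,
    )
    return response.choices[0].message.content
```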


@@ -1397,6 +1397,7 @@ class LiteLLMBase(ABC):
        "01.AI",
        "GiteeAI",
        "302.AI",
        "Jiekou.AI",
    ]

    def __init__(self, key, model_name, base_url=None, **kwargs):


@@ -931,3 +931,12 @@ class DeerAPIEmbed(OpenAIEmbed):
        if not base_url:
            base_url = "https://api.deerapi.com/v1"
        super().__init__(key, model_name, base_url)


class JiekouAIEmbed(OpenAIEmbed):
    _FACTORY_NAME = "Jiekou.AI"

    def __init__(self, key, model_name, base_url="https://api.jiekou.ai/openai/v1/embeddings"):
        if not base_url:
            base_url = "https://api.jiekou.ai/openai/v1/embeddings"
        super().__init__(key, model_name, base_url)
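
For context, a minimal sketch of the OpenAI-style embeddings request that the new class targets; the payload shape and response handling follow the OpenAI embeddings schema and are assumptions for Jiekou.AI's endpoint.

```python
# Minimal sketch, assuming an OpenAI-compatible embeddings endpoint at the default base_url above.
import requests


def embed_texts(api_key: str, texts: list[str], model: str = "baai/bge-m3") -> list[list[float]]:
    resp = requests.post(
        "https://api.jiekou.ai/openai/v1/embeddings",
        headers={"Authorization": f"Bearer {api_key}"},
        json={"model": model, "input": texts},
        timeout=60,
    )
    resp.raise_for_status()
    # OpenAI-style response: one vector per input text, in the same order.
    return [item["embedding"] for item in resp.json()["data"]]
```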


@@ -489,3 +489,12 @@ class Ai302Rerank(Base):
        if not base_url:
            base_url = "https://api.302.ai/v1/rerank"
        super().__init__(key, model_name, base_url)


class JiekouAIRerank(JinaRerank):
    _FACTORY_NAME = "Jiekou.AI"

    def __init__(self, key, model_name, base_url="https://api.jiekou.ai/openai/v1/rerank"):
        if not base_url:
            base_url = "https://api.jiekou.ai/openai/v1/rerank"
        super().__init__(key, model_name, base_url)
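
Likewise, a minimal sketch of the Jina-style rerank request that `JinaRerank`-derived classes issue; the field names follow the Jina rerank schema and are assumptions for Jiekou.AI's endpoint.

```python
# Minimal sketch, assuming a Jina-compatible rerank endpoint at the default base_url above.
import requests


def rerank(api_key: str, query: str, documents: list[str],
           model: str = "baai/bge-reranker-v2-m3") -> list[tuple[int, float]]:
    resp = requests.post(
        "https://api.jiekou.ai/openai/v1/rerank",
        headers={"Authorization": f"Bearer {api_key}"},
        json={"model": model, "query": query, "documents": documents, "top_n": len(documents)},
        timeout=60,
    )
    resp.raise_for_status()
    # Each result carries the index of the original document and a relevance score.
    return [(r["index"], r["relevance_score"]) for r in resp.json()["results"]]
```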


@@ -0,0 +1,3 @@
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M16 4H4V20H16C18.2091 20 20 18.2091 20 16V8H24V16C24 20.4183 20.4183 24 16 24H4C1.79086 24 1.61064e-08 22.2091 0 20V0H16V4ZM24 4H20V0H24V4Z" fill="#7C3AED"/>
</svg>



@@ -58,6 +58,7 @@ export enum LLMFactory {
  Longcat = 'LongCat',
  CometAPI = 'CometAPI',
  DeerAPI = 'DeerAPI',
  JiekouAI = 'Jiekou.AI',
  Builtin = 'Builtin',
}
@@ -122,5 +123,6 @@ export const IconMap = {
  [LLMFactory.Longcat]: 'longcat',
  [LLMFactory.CometAPI]: 'cometapi',
  [LLMFactory.DeerAPI]: 'deerapi',
  [LLMFactory.JiekouAI]: 'jiekouai',
  [LLMFactory.Builtin]: 'builtin',
};


@@ -46,6 +46,7 @@ const orderFactoryList = [
  LLMFactory.Ai302,
  LLMFactory.CometAPI,
  LLMFactory.DeerAPI,
  LLMFactory.JiekouAI,
];

export const sortLLmFactoryListBySpecifiedOrder = (list: IFactory[]) => {