feat(bridge): spg server bridge supports config check and run solver (#287)

* x

* x (#280)

* bridge add solver

* x

* feat(bridge): spg server bridge (#283)

* x

* bridge add solver

* x

* add invoke

* llm client catch error
This commit is contained in:
zhuzhongshu123 2025-01-17 13:52:00 +08:00 committed by GitHub
parent ca31351971
commit deae277510
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 34 additions and 4 deletions

View File

@ -16,7 +16,6 @@ from kag.common.conf import KAGConstants, init_env
def init_kag_config(project_id: str, host_addr: str):
    """Prime the KAG runtime environment for a given project.

    Exports the project id and server host address as environment
    variables (the keys come from ``KAGConstants``) and then runs
    ``init_env()`` so the rest of the framework picks them up.

    Args:
        project_id: Identifier of the SPG project to operate on.
        host_addr: Base address of the SPG server (e.g. ``http://host:port``).
    """
    env = os.environ
    env[KAGConstants.ENV_KAG_PROJECT_ID] = project_id
    env[KAGConstants.ENV_KAG_PROJECT_HOST_ADDR] = host_addr
    init_env()
@ -47,3 +46,34 @@ class SPGServerBridge:
if hasattr(instance.input_types, "from_dict"):
input_data = instance.input_types.from_dict(input_data)
return [x.to_dict() for x in instance.invoke(input_data, write_ckpt=False)]
def run_llm_config_check(self, llm_config):
    """Validate an LLM configuration on behalf of the SPG server.

    Args:
        llm_config: LLM configuration payload to validate.

    Returns:
        The result produced by ``LLMConfigChecker.check`` for the
        given configuration.
    """
    # Imported lazily so the bridge module loads even when the LLM
    # subpackage is unavailable.
    from kag.common.llm.llm_config_checker import LLMConfigChecker

    checker = LLMConfigChecker()
    return checker.check(llm_config)
def run_vectorizer_config_check(self, vec_config):
    """Validate a vectorizer-model configuration on behalf of the SPG server.

    Args:
        vec_config: Vectorize-model configuration payload to validate.

    Returns:
        The result produced by ``VectorizeModelConfigChecker.check``
        for the given configuration.
    """
    # Imported lazily so the bridge module loads even when the
    # vectorize-model subpackage is unavailable.
    from kag.common.vectorize_model.vectorize_model_config_checker import (
        VectorizeModelConfigChecker,
    )

    checker = VectorizeModelConfigChecker()
    return checker.check(vec_config)
def run_solver(
    self,
    project_id,
    task_id,
    query,
    func_name="invoke",
    is_report=True,
    host_addr="http://127.0.0.1:8887",
):
    """Dispatch a query to the KAG solver and return its result.

    Args:
        project_id: Identifier of the project the query belongs to.
        task_id: Identifier of the solver task.
        query: The user query to solve.
        func_name: Name of the ``SolverMain`` method to call
            (defaults to ``"invoke"``).
        is_report: Whether the solver should report progress back to
            the server.
        host_addr: Address of the SPG server to report to.

    Returns:
        Whatever the selected ``SolverMain`` method returns.
    """
    # Imported lazily to avoid pulling the solver stack in at module load.
    from kag.solver.main_solver import SolverMain

    solver = SolverMain()
    # Resolve the requested entry point by name; defaults to `invoke`.
    entry_point = getattr(solver, func_name)
    return entry_point(
        project_id=project_id,
        task_id=task_id,
        query=query,
        is_report=is_report,
        host_addr=host_addr,
    )

View File

@ -77,7 +77,7 @@ class LLMClient(Registrable):
variables: Dict[str, Any],
prompt_op: PromptABC,
with_json_parse: bool = True,
with_except: bool = True,
with_except: bool = False,
):
"""
Call the model and process the result.
@ -109,10 +109,10 @@ class LLMClient(Registrable):
except Exception as e:
import traceback
logger.error(f"Error {e} during invocation: {traceback.format_exc()}")
logger.debug(f"Error {e} during invocation: {traceback.format_exc()}")
if with_except:
raise RuntimeError(
f"LLM invoke exception, info: {e}\nllm input: {input}\nllm output: {response}"
f"LLM invoke exception, info: {e}\nllm input: \n{prompt}\nllm output: \n{response}"
)
return result