From 63c4a733945cd9d72f6d9cf2a65c278143af0100 Mon Sep 17 00:00:00 2001 From: MichelleMa8 Date: Thu, 7 Dec 2023 17:07:23 +0800 Subject: [PATCH 1/2] fix pangu api --- applications/ColossalQA/colossalqa/memory.py | 2 +- .../ColossalQA/examples/webui_demo/server.py | 12 ++++++++---- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/applications/ColossalQA/colossalqa/memory.py b/applications/ColossalQA/colossalqa/memory.py index 255df68a367e..7a5512281035 100644 --- a/applications/ColossalQA/colossalqa/memory.py +++ b/applications/ColossalQA/colossalqa/memory.py @@ -154,7 +154,7 @@ def load_memory_variables(self, inputs: Dict[str, Any]) -> Dict[str, str]: remain = self.max_tokens - prompt_length while self.get_conversation_length() > remain: if len(self.buffered_history.messages) <= 2: - raise RuntimeError("Exeeed max_tokens, trunck size of retrieved documents is too large") + raise RuntimeError("Exceeded max_tokens, chunk size of retrieved documents is too large") temp = self.buffered_history.messages.pop(0) self.summarized_history_temp.messages.append(temp) temp = self.buffered_history.messages.pop(0) diff --git a/applications/ColossalQA/examples/webui_demo/server.py b/applications/ColossalQA/examples/webui_demo/server.py index 050994567570..442de6ec4386 100644 --- a/applications/ColossalQA/examples/webui_demo/server.py +++ b/applications/ColossalQA/examples/webui_demo/server.py @@ -77,12 +77,16 @@ def generate(data: GenerationTaskReq, request: Request): colossal_api = ColossalAPI(model_name, all_config["model"]["model_path"]) llm = ColossalLLM(n=1, api=colossal_api) elif all_config["model"]["mode"] == "api": - all_config["chain"]["mem_llm_kwargs"] = None - all_config["chain"]["disambig_llm_kwargs"] = None - all_config["chain"]["gen_llm_kwargs"] = None if model_name == "pangu_api": from colossalqa.local.pangu_llm import Pangu - llm = Pangu(id=1) + + gen_config = { + "user": "User", + "max_tokens": all_config["chain"]["disambig_llm_kwargs"]["max_new_tokens"], 
+ "temperature": all_config["chain"]["disambig_llm_kwargs"]["temperature"], + "n": 1 + } + llm = Pangu(gen_config=gen_config) llm.set_auth_config() # verify user's auth info here elif model_name == "chatgpt_api": from langchain.llms import OpenAI From a88eb469f7928dc018023a34f526508c204877b7 Mon Sep 17 00:00:00 2001 From: MichelleMa8 Date: Mon, 11 Dec 2023 10:24:39 +0800 Subject: [PATCH 2/2] add comment --- applications/ColossalQA/examples/webui_demo/server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/applications/ColossalQA/examples/webui_demo/server.py b/applications/ColossalQA/examples/webui_demo/server.py index 442de6ec4386..3b0f82845c87 100644 --- a/applications/ColossalQA/examples/webui_demo/server.py +++ b/applications/ColossalQA/examples/webui_demo/server.py @@ -84,7 +84,7 @@ def generate(data: GenerationTaskReq, request: Request): "user": "User", "max_tokens": all_config["chain"]["disambig_llm_kwargs"]["max_new_tokens"], "temperature": all_config["chain"]["disambig_llm_kwargs"]["temperature"], - "n": 1 + "n": 1 # the number of responses generated } llm = Pangu(gen_config=gen_config) llm.set_auth_config() # verify user's auth info here