From 9a74fd2ee8d82e337682442ff55bf93a7b609163 Mon Sep 17 00:00:00 2001
From: yihong
Date: Sun, 28 Jan 2024 23:09:44 +0800
Subject: [PATCH] fix: #436 (#440)

* fix: #436

Signed-off-by: yihong0618

* fix: lint

Signed-off-by: yihong0618

---------

Signed-off-by: yihong0618
---
 pdm.lock                  | 28 +++++++++-------------------
 pyproject.toml            |  2 +-
 requirements.txt          |  3 +--
 xiaogpt/bot/bard_bot.py   |  1 +
 xiaogpt/bot/gemini_bot.py |  1 +
 xiaogpt/bot/glm_bot.py    | 39 ++++++++++++++++++++++++++++-----------
 xiaogpt/bot/qwen_bot.py   |  7 +++----
 xiaogpt/cli.py            |  4 ++--
 8 files changed, 46 insertions(+), 39 deletions(-)

diff --git a/pdm.lock b/pdm.lock
index 05c1f101..6d450195 100644
--- a/pdm.lock
+++ b/pdm.lock
@@ -5,7 +5,7 @@
 groups = ["default"]
 strategy = ["cross_platform", "inherit_metadata"]
 lock_version = "4.4.1"
-content_hash = "sha256:ac4a524e864b0e49972c6b07f78af89126d37a21b947d66484bf16a14332f1ef"
+content_hash = "sha256:255e73e031af7be56dbbde067b3a0b3fd2507d56fdfe217e0629ae7ecce56729"
 
 [[package]]
 name = "aiohttp"
@@ -332,16 +332,6 @@ files = [
     {file = "dashscope-1.10.0-py3-none-any.whl", hash = "sha256:0dd0a6cfeaf12bfab24825993add3f8e3370c2a36cfdb09b5b1a73c793f372e1"},
 ]
 
-[[package]]
-name = "dataclasses"
-version = "0.6"
-summary = "A backport of the dataclasses module for Python 3.6"
-groups = ["default"]
-files = [
-    {file = "dataclasses-0.6-py3-none-any.whl", hash = "sha256:454a69d788c7fda44efd71e259be79577822f5e3f53f029a22d08004e951dc9f"},
-    {file = "dataclasses-0.6.tar.gz", hash = "sha256:6988bd2b895eef432d562370bb707d540f32f7360ab13da45340101bc2307d84"},
-]
-
 [[package]]
 name = "dataclasses-json"
 version = "0.6.3"
@@ -1958,17 +1948,17 @@ files = [
 
 [[package]]
 name = "zhipuai"
-version = "1.0.7"
-requires_python = ">=3.6"
+version = "2.0.1"
+requires_python = ">=3.7"
 summary = "A SDK library for accessing big model apis from ZhipuAI"
 groups = ["default"]
 dependencies = [
-    "PyJWT",
-    "cachetools",
-    "dataclasses",
-    "requests",
+    "cachetools>=4.2.2",
+    "httpx>=0.23.0",
+    "pydantic>=2.5.2",
+    "pyjwt~=2.8.0",
 ]
 files = [
-    {file = "zhipuai-1.0.7-py3-none-any.whl", hash = "sha256:360c01b8c2698f366061452e86d5a36a5ff68a576ea33940da98e4806f232530"},
-    {file = "zhipuai-1.0.7.tar.gz", hash = "sha256:b80f699543d83cce8648acf1ce32bc2725d1c1c443baffa5882abc2cc704d581"},
+    {file = "zhipuai-2.0.1-py3-none-any.whl", hash = "sha256:738033d95696c3d5117dc4487e37d924e3ebbcdfa0072812b3f63a08ff72274a"},
+    {file = "zhipuai-2.0.1.tar.gz", hash = "sha256:297bbdbe9393da2d1dc8066c39cf39bb2342f170d86f2b7b7a13ba368c53d701"},
 ]
diff --git a/pyproject.toml b/pyproject.toml
index 97685340..5aaf2fb0 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -16,7 +16,7 @@ dependencies = [
     "openai>=1",
     "aiohttp",
     "rich",
-    "zhipuai",
+    "zhipuai==2.0.1",
    "bardapi",
     "edge-tts>=6.1.3",
     "EdgeGPT==0.1.26",
diff --git a/requirements.txt b/requirements.txt
index 414612de..ade5ea02 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -16,7 +16,6 @@ certifi==2023.7.22
 charset-normalizer==3.3.2
 colorama==0.4.6
 dashscope==1.10.0
-dataclasses==0.6
 dataclasses-json==0.6.3
 deep-translator==1.11.4
 distro==1.9.0
@@ -88,4 +87,4 @@ urllib3==2.1.0
 wcwidth==0.2.13
 websockets==12.0
 yarl==1.9.4
-zhipuai==1.0.7
+zhipuai==2.0.1
diff --git a/xiaogpt/bot/bard_bot.py b/xiaogpt/bot/bard_bot.py
index d85dee9c..9c97ec19 100644
--- a/xiaogpt/bot/bard_bot.py
+++ b/xiaogpt/bot/bard_bot.py
@@ -1,4 +1,5 @@
 """ChatGLM bot"""
+
 from __future__ import annotations
 
 from typing import Any
diff --git a/xiaogpt/bot/gemini_bot.py b/xiaogpt/bot/gemini_bot.py
index 44ffe66b..f3ac6341 100644
--- a/xiaogpt/bot/gemini_bot.py
+++ b/xiaogpt/bot/gemini_bot.py
@@ -1,4 +1,5 @@
 """Google Gemini bot"""
+
 from __future__ import annotations
 
 from typing import Any
diff --git a/xiaogpt/bot/glm_bot.py b/xiaogpt/bot/glm_bot.py
index 130903e3..98799f1f 100644
--- a/xiaogpt/bot/glm_bot.py
+++ b/xiaogpt/bot/glm_bot.py
@@ -1,4 +1,5 @@
 """ChatGLM bot"""
+
 from __future__ import annotations
 
 from typing import Any
@@ -13,34 +14,50 @@ class GLMBot(ChatHistoryMixin, BaseBot):
     default_options = {"model": "chatglm_turbo"}
 
     def __init__(self, glm_key: str) -> None:
-        import zhipuai
+        from zhipuai import ZhipuAI
+
+        self.model = "glm-4"  # Change glm model here
         self.history = []
-        zhipuai.api_key = glm_key
+        self.client = ZhipuAI(api_key=glm_key)
 
     @classmethod
     def from_config(cls, config):
         return cls(glm_key=config.glm_key)
 
     def ask(self, query, **options):
-        import zhipuai
-
         ms = self.get_messages()
         kwargs = {**self.default_options, **options}
-        kwargs["prompt"] = ms
+        kwargs["model"] = self.model
         ms.append({"role": "user", "content": f"{query}"})
+        kwargs["messages"] = ms
         try:
-            r = zhipuai.model_api.sse_invoke(**kwargs)
+            r = self.client.chat.completions.create(**kwargs)
         except Exception as e:
             print(str(e))
             return
-        message = ""
-        for i in r.events():
-            message += str(i.data)
+        message = r.choices[0].message.content
         self.add_message(query, message)
         print(message)
         return message
 
-    def ask_stream(self, query: str, **options: Any):
-        raise Exception("GLM do not support stream")
+    async def ask_stream(self, query: str, **options: Any):
+        ms = self.get_messages()
+        kwargs = {**self.default_options, **options}
+        kwargs["model"] = self.model
+        ms.append({"role": "user", "content": f"{query}"})
+        kwargs["messages"] = ms
+        kwargs["stream"] = True
+        try:
+            r = self.client.chat.completions.create(**kwargs)
+        except Exception as e:
+            print(str(e))
+            return
+        full_content = ""
+        for chunk in r:
+            content = chunk.choices[0].delta.content
+            full_content += content
+            print(content, end="")
+            yield content
+        self.add_message(query, full_content)
diff --git a/xiaogpt/bot/qwen_bot.py b/xiaogpt/bot/qwen_bot.py
index dd811a34..ad6d9450 100644
--- a/xiaogpt/bot/qwen_bot.py
+++ b/xiaogpt/bot/qwen_bot.py
@@ -1,4 +1,5 @@
-"""ChatGLM bot"""
+"""Qwen bot"""
+
 from __future__ import annotations
 
 from http import HTTPStatus
@@ -16,9 +17,7 @@ def __init__(self, qwen_key: str) -> None:
         import dashscope
         from dashscope.api_entities.dashscope_response import Role
 
-        self.history = [
-            {"role": Role.SYSTEM, "content": "You are a helpful assistant."}
-        ]
+        self.history = []
         dashscope.api_key = qwen_key
 
     @classmethod
diff --git a/xiaogpt/cli.py b/xiaogpt/cli.py
index 4a5003e6..b145eab6 100644
--- a/xiaogpt/cli.py
+++ b/xiaogpt/cli.py
@@ -195,8 +195,8 @@ def main():
     )
 
     options = parser.parse_args()
-    if options.bot in ["glm", "bard"] and options.stream:
-        raise Exception("For now ChatGLM do not support stream")
+    if options.bot in ["bard"] and options.stream:
+        raise Exception("For now Bard do not support stream")
 
     config = Config.from_options(options)
     miboy = MiGPT(config)
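
Note for readers upgrading their own code past the removed zhipuai 1.x interface (zhipuai.api_key plus zhipuai.model_api.sse_invoke): the sketch below shows the zhipuai 2.x call pattern this patch moves GLMBot onto. It is an illustration only, not part of the patch; the ZHIPUAI_API_KEY environment variable is an assumed placeholder, and "glm-4" simply mirrors the model name hard-coded in GLMBot.__init__ above.

import os

# zhipuai>=2.0 exposes a client object instead of module-level calls
from zhipuai import ZhipuAI

# Assumption for this sketch: the API key is read from an environment variable.
client = ZhipuAI(api_key=os.environ["ZHIPUAI_API_KEY"])
messages = [{"role": "user", "content": "hello"}]

# Blocking request, mirroring GLMBot.ask(): the reply text sits on the message object.
reply = client.chat.completions.create(model="glm-4", messages=messages)
print(reply.choices[0].message.content)

# Streaming request, mirroring GLMBot.ask_stream(): each chunk carries a delta.
stream = client.chat.completions.create(model="glm-4", messages=messages, stream=True)
for chunk in stream:
    print(chunk.choices[0].delta.content or "", end="")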