markqiu commited on
Commit
1717121
1 Parent(s): fb25388

调试 qianfan 成功，支持 qianfan 所有模型

Browse files
modules/models/ERNIE.py CHANGED
@@ -1,5 +1,5 @@
1
  import os
2
- import erniebot as eb
3
 
4
  from ..presets import *
5
  from ..utils import *
@@ -8,11 +8,9 @@ from .base_model import BaseLLMModel
8
 
9
 
10
  class ERNIE_Client(BaseLLMModel):
11
- def __init__(self, model_name, api_key, secret_key, api_type: str = "aistudio", access_token: str = None) -> None:
12
  super().__init__(model_name=model_name)
13
- self.auth_config = {"api_type": api_type, "ak": api_key, "sk": secret_key}
14
- if access_token:
15
- self.auth_config["access_token"] = access_token
16
 
17
  def get_answer_stream_iter(self):
18
  system_prompt = self.system_prompt
@@ -29,15 +27,11 @@ class ERNIE_Client(BaseLLMModel):
29
  "temperature": self.temperature,
30
  }
31
 
32
- if self.model_name == "chat_file":
33
- response = eb.ChatFile.create(_config_=self.auth_config, **data, stream=True)
34
- else:
35
- response = eb.ChatCompletion.create(
36
- _config_=self.auth_config, model=self.model_name, **data, stream=True
37
- )
38
  for result in response:
39
- yield result.get_result()
40
-
41
 
42
  def get_answer_at_once(self):
43
  system_prompt = self.system_prompt
@@ -48,16 +42,10 @@ class ERNIE_Client(BaseLLMModel):
48
  # 去除 history 中 role 为 system 的条目
49
  history = [i for i in history if i["role"] != "system"]
50
 
51
-
52
  data = {
53
  "messages": history,
54
  "top_p": self.top_p,
55
  "temperature": self.temperature,
56
  }
57
 
58
- if self.model_name == "chat_file":
59
- return eb.ChatFile.create(_config_=self.auth_config, **data, stream=False).get_result()
60
- else:
61
- return eb.ChatCompletion.create(
62
- _config_=self.auth_config, model=self.model_name, **data, stream=False
63
- ).get_result()
 
1
  import os
2
+ import qianfan
3
 
4
  from ..presets import *
5
  from ..utils import *
 
8
 
9
 
10
  class ERNIE_Client(BaseLLMModel):
11
def __init__(self, model_name, api_key, secret_key) -> None:
    """Set up an ERNIE model client backed by Baidu's Qianfan SDK.

    Args:
        model_name: Qianfan model identifier; forwarded to the base class.
        api_key: Qianfan API key (passed to the SDK as ``ak``).
        secret_key: Qianfan secret key (passed to the SDK as ``sk``).
    """
    super().__init__(model_name=model_name)
    # A single ChatCompletion handle carries the credentials; both the
    # streaming and one-shot answer paths reuse it.
    completion_client = qianfan.ChatCompletion(ak=api_key, sk=secret_key)
    self.chat_completion = completion_client
 
 
14
 
15
  def get_answer_stream_iter(self):
16
  system_prompt = self.system_prompt
 
27
  "temperature": self.temperature,
28
  }
29
 
30
+ response = self.chat_completion.do(model=self.model_name, **data, stream=True)
31
+ partial_text = ""
 
 
 
 
32
  for result in response:
33
+ partial_text += result['result']
34
+ yield partial_text
35
 
36
  def get_answer_at_once(self):
37
  system_prompt = self.system_prompt
 
42
  # 去除 history 中 role 为 system 的条目
43
  history = [i for i in history if i["role"] != "system"]
44
 
 
45
  data = {
46
  "messages": history,
47
  "top_p": self.top_p,
48
  "temperature": self.temperature,
49
  }
50
 
51
+ return self.chat_completion.do(model=self.model_name, **data)['result']
 
 
 
 
 
modules/models/models.py CHANGED
@@ -128,7 +128,7 @@ def get_model(
128
  model = Qwen_Client(model_name, user_name=user_name)
129
  elif model_type == ModelType.ERNIE:
130
  from .ERNIE import ERNIE_Client
131
- model = ERNIE_Client(model_name, api_key=os.getenv("ERNIE_APIKEY"),secret_key=os.getenv("ERNIE_SECRETKEY"), api_type=os.getenv("ERNIE_API_TYPE"), access_token=os.getenv("ERNIE_ACCESS_TOKEN"))
132
  elif model_type == ModelType.DALLE3:
133
  from .DALLE3 import OpenAI_DALLE3_Client
134
  access_key = os.environ.get("OPENAI_API_KEY", access_key)
 
128
  model = Qwen_Client(model_name, user_name=user_name)
129
  elif model_type == ModelType.ERNIE:
130
  from .ERNIE import ERNIE_Client
131
+ model = ERNIE_Client(model_name, api_key=os.getenv("ERNIE_APIKEY"),secret_key=os.getenv("ERNIE_SECRETKEY"))
132
  elif model_type == ModelType.DALLE3:
133
  from .DALLE3 import OpenAI_DALLE3_Client
134
  access_key = os.environ.get("OPENAI_API_KEY", access_key)
requirements.txt CHANGED
@@ -33,4 +33,4 @@ pydantic==2.5.2
33
  google-search-results
34
  anthropic==0.3.11
35
  Pillow>=10.1.0
36
- erniebot
 
33
  google-search-results
34
  anthropic==0.3.11
35
  Pillow>=10.1.0
36
+ qianfan>=0.2.2