Topic
Calling ChatGLM from LangChain

The code below wraps ZhipuAI's GLM-4 chat API in a custom LangChain `LLM` subclass so it can be used like any other LangChain model.
```python
import os

from langchain.llms.base import LLM
from langchain_core.messages.ai import AIMessage
from zhipuai import ZhipuAI


class ChatGLM4(LLM):
    # Conversation history and the ZhipuAI SDK client.
    history: list = []
    client: object = None

    def __init__(self):
        super().__init__()
        zhipuai_api_key = os.getenv('ZHIPU_API_KEY')
        self.client = ZhipuAI(api_key=zhipuai_api_key)

    @property
    def _llm_type(self):
        return "ChatGLM4"

    def invoke(self, prompt, config=None, history=None):
        if history is None:
            history = []
        # The prompt may arrive as a PromptValue from a chain; convert it to plain text.
        if not isinstance(prompt, str):
            prompt = prompt.to_string()
        history.append({"role": "user", "content": prompt})
        response = self.client.chat.completions.create(
            model="glm-4",
            messages=history
        )
        result = response.choices[0].message.content
        return AIMessage(content=result)

    def _call(self, prompt, config=None, history=None):
        return self.invoke(prompt, config, history)

    def stream(self, prompt, config=None, history=None):
        if history is None:
            history = []
        if not isinstance(prompt, str):
            prompt = prompt.to_string()
        history.append({"role": "user", "content": prompt})
        # Request a streaming response and yield incremental content chunks.
        response = self.client.chat.completions.create(
            model="glm-4",
            messages=history,
            stream=True
        )
        for chunk in response:
            yield chunk.choices[0].delta.content
```
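
A minimal usage sketch, assuming the `ZHIPU_API_KEY` environment variable is set and the `ChatGLM4` class above is in scope; the prompt texts are only illustrations:

```python
from langchain_core.prompts import ChatPromptTemplate

# Instantiate the wrapper; the API key is read from the environment in __init__.
llm = ChatGLM4()

# Plain invocation with a string prompt; invoke() returns an AIMessage.
print(llm.invoke("Introduce LangChain in one sentence.").content)

# Composed with a prompt template via LCEL; the PromptValue produced by the
# template is converted to a string inside ChatGLM4.invoke().
prompt = ChatPromptTemplate.from_template("Translate the following into English: {text}")
chain = prompt | llm
print(chain.invoke({"text": "你好，世界"}).content)

# Streaming: print incremental content chunks as they arrive.
for piece in llm.stream("Tell a short joke about programmers."):
    print(piece, end="", flush=True)
```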