from openai import OpenAI

from Config.Config import MODEL_API_KEY, MODEL_NAME


class ALiYunUtil:
    """Thin client for the Aliyun DashScope LLM service via its
    OpenAI-compatible endpoint.

    Credentials and the default model name come from ``Config.Config``;
    individual calls may override the model per request.
    """

    def __init__(self):
        # DashScope exposes an OpenAI-compatible API surface, so the
        # standard OpenAI SDK client is reused with a custom base_url.
        self.client = OpenAI(
            api_key=MODEL_API_KEY,
            base_url="https://dashscope.aliyuncs.com/compatible-mode/v1"
        )
        self.model_name = MODEL_NAME

    def chat(self, prompt, model=None):
        """Send a single-turn chat request and return the full answer.

        :param prompt: the user's question (sent as one user message)
        :param model: optional model name; defaults to Config's MODEL_NAME
        :return: the model's answer text, or an error string on failure
                 (errors are reported in-band rather than raised)
        """
        try:
            completion = self.client.chat.completions.create(
                model=model or self.model_name,
                messages=[
                    {'role': 'user', 'content': prompt}
                ]
            )
            return completion.choices[0].message.content
        except Exception as e:
            # Deliberate best-effort contract: callers receive the error
            # text instead of an exception.
            return f"发生错误: {str(e)}"

    def chat_stream(self, prompt, model=None):
        """Send a single-turn chat request and stream the answer.

        Note: this is a synchronous generator (the original docstring
        incorrectly described it as asynchronous).

        :param prompt: the user's question (sent as one user message)
        :param model: optional model name; defaults to Config's MODEL_NAME
        :return: generator yielding text fragments of the model's answer;
                 on failure yields a single error string
        """
        try:
            stream = self.client.chat.completions.create(
                model=model or self.model_name,
                messages=[
                    {'role': 'user', 'content': prompt}
                ],
                stream=True
            )
            for chunk in stream:
                # Guard against chunks with an empty `choices` list —
                # OpenAI-compatible streams can emit such chunks (e.g.
                # trailing usage-only chunks), which would otherwise
                # raise IndexError here.
                if chunk.choices and chunk.choices[0].delta.content:
                    yield chunk.choices[0].delta.content
        except Exception as e:
            # Same best-effort contract as chat(): surface the error
            # in-band as the final yielded string.
            yield f"发生错误: {str(e)}"