from Config.Config import MODEL_API_KEY, MODEL_NAME
from openai import OpenAI

# Initialize the client against Qwen's OpenAI-compatible endpoint on DashScope
client = OpenAI(
    api_key=MODEL_API_KEY,
    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1"
)


def call_qwen_plus(prompt, stream_callback=None):
    """Call the Qwen API with streaming enabled and return the full response text."""
    try:
        response = client.chat.completions.create(
            model=MODEL_NAME,
            messages=[
                {"role": "system", "content": "You are a professional mathematics teaching assistant."},
                {"role": "user", "content": prompt}
            ],
            temperature=0.7,
            stream=True  # enable streaming
        )
        full_response = ""
        for chunk in response:
            if not chunk.choices:  # skip stream chunks that carry no choices
                continue
            content = chunk.choices[0].delta.content
            if content:
                full_response += content
                if stream_callback:
                    stream_callback(content)  # forward each streamed fragment in real time
        return full_response
    except Exception as e:
        print(f"API call failed: {e}")
        return None


# Example usage
if __name__ == "__main__":
    def print_chunk(chunk):
        print(chunk, end="", flush=True)

    test_prompt = "Please explain the Pythagorean theorem."
    print("Qwen response:")
    result = call_qwen_plus(test_prompt, stream_callback=print_chunk)
    print()  # newline after the streamed output
    print("Full response:", result)