From d7f4b32efaa81283fb018e47bac57a979f7970ef Mon Sep 17 00:00:00 2001
From: HuangHai <10402852@qq.com>
Date: Fri, 15 Aug 2025 16:18:41 +0800
Subject: [PATCH] 'commit'

---
 dsLightRag/Test/TestGoogleGemini.py           | 79 +-----------------
 dsLightRag/Util/GoApiUtil.py                  | 78 +++++++++++++++++
 .../__pycache__/GoApiUtil.cpython-310.pyc     | Bin 0 -> 2221 bytes
 3 files changed, 79 insertions(+), 78 deletions(-)
 create mode 100644 dsLightRag/Util/GoApiUtil.py
 create mode 100644 dsLightRag/Util/__pycache__/GoApiUtil.cpython-310.pyc

diff --git a/dsLightRag/Test/TestGoogleGemini.py b/dsLightRag/Test/TestGoogleGemini.py
index 853ea2fa..a69f587f 100644
--- a/dsLightRag/Test/TestGoogleGemini.py
+++ b/dsLightRag/Test/TestGoogleGemini.py
@@ -1,81 +1,4 @@
-import json
-
-import requests
-
-from Config.Config import GPTNB_API_KEY
-
-
-class ModelInteractor:
-    def __init__(self, api_key=GPTNB_API_KEY, api_url="https://goapi.gptnb.ai/v1/chat/completions"):
-        self.api_key = api_key
-        self.api_url = api_url
-        self.headers = {
-            "Content-Type": "application/json",
-            "Authorization": f"Bearer {self.api_key}"
-        }
-
-    def stream_request(self, model, prompt, temperature=0.7):
-        """
-        Send a streaming request to the model API.
-
-        Parameters:
-        - model: model name
-        - prompt: user prompt
-        - temperature: sampling temperature controlling output randomness
-
-        Returns:
-        - None; the streaming response is printed directly
-        """
-        payload = {
-            "model": model,
-            "messages": [{
-                "role": "user",
-                "content": prompt
-            }],
-            "temperature": temperature,
-            "stream": True
-        }
-
-        try:
-            response = requests.post(
-                self.api_url,
-                headers=self.headers,
-                data=json.dumps(payload),
-                stream=True,
-                timeout=30
-            )
-            response.raise_for_status()
-
-            print(f"Streaming response from model {model}: ")
-            for chunk in response.iter_content(chunk_size=None):
-                if chunk:
-                    chunk_data = chunk.decode('utf-8', errors='replace')
-
-                    for line in chunk_data.splitlines():
-                        line = line.strip()
-                        if not line:
-                            continue
-
-                        if line == 'data: [DONE]':
-                            print("\nStream finished")
-                            return
-
-                        if line.startswith('data: '):
-                            line = line[6:]
-
-                        try:
-                            data = json.loads(line)
-                            if 'choices' in data and len(data['choices']) > 0:
-                                delta = data['choices'][0].get('delta', {})
-                                content = delta.get('content', '')
-                                if content and content != '\n':
-                                    print(content, end='', flush=True)
-                        except json.JSONDecodeError as e:
-                            print(f"[debug] JSON decode error: {e}, content: {line}")
-
-        except requests.exceptions.RequestException as e:
-            print(f"Request error: {e}")
-
+from Util.GoApiUtil import ModelInteractor
 
 # Example usage
 if __name__ == "__main__":
diff --git a/dsLightRag/Util/GoApiUtil.py b/dsLightRag/Util/GoApiUtil.py
new file mode 100644
index 00000000..eb99ba37
--- /dev/null
+++ b/dsLightRag/Util/GoApiUtil.py
@@ -0,0 +1,78 @@
+import json
+
+import requests
+
+from Config.Config import GPTNB_API_KEY
+
+
+class ModelInteractor:
+    def __init__(self, api_key=GPTNB_API_KEY, api_url="https://goapi.gptnb.ai/v1/chat/completions"):
+        self.api_key = api_key
+        self.api_url = api_url
+        self.headers = {
+            "Content-Type": "application/json",
+            "Authorization": f"Bearer {self.api_key}"
+        }
+
+    def stream_request(self, model, prompt, temperature=0.7):
+        """
+        Send a streaming request to the model API.
+
+        Parameters:
+        - model: model name
+        - prompt: user prompt
+        - temperature: sampling temperature controlling output randomness
+
+        Returns:
+        - None; the streaming response is printed directly
+        """
+        payload = {
+            "model": model,
+            "messages": [{
+                "role": "user",
+                "content": prompt
+            }],
+            "temperature": temperature,
+            "stream": True
+        }
+
+        try:
+            response = requests.post(
+                self.api_url,
+                headers=self.headers,
+                data=json.dumps(payload),
+                stream=True,
+                timeout=30
+            )
+            response.raise_for_status()
+
+            print(f"Streaming response from model {model}: ")
+            for chunk in response.iter_content(chunk_size=None):
+                if chunk:
+                    chunk_data = chunk.decode('utf-8', errors='replace')
+
+                    for line in chunk_data.splitlines():
+                        line = line.strip()
+                        if not line:
+                            continue
+
+                        if line == 'data: [DONE]':
+                            print("\nStream finished")
+                            return
+
+                        if line.startswith('data: '):
+                            line = line[6:]
+
+                        try:
+                            data = json.loads(line)
+                            if 'choices' in data and len(data['choices']) > 0:
+                                delta = data['choices'][0].get('delta', {})
+                                content = delta.get('content', '')
+                                if content and content != '\n':
+                                    print(content, end='', flush=True)
+                        except json.JSONDecodeError as e:
+                            print(f"[debug] JSON decode error: {e}, content: {line}")
+
+        except requests.exceptions.RequestException as e:
+            print(f"Request error: {e}")
+
diff --git a/dsLightRag/Util/__pycache__/GoApiUtil.cpython-310.pyc b/dsLightRag/Util/__pycache__/GoApiUtil.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..54f36ab92ecbbb841856f8ec7b1b612aaf0bf953
Binary files /dev/null and b/dsLightRag/Util/__pycache__/GoApiUtil.cpython-310.pyc differ
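
For reference, a minimal usage sketch of the relocated class (not part of the patch): the model id and the prompt below are placeholders rather than values taken from this commit, and the call assumes the repository root is on PYTHONPATH so that Config.Config and Util.GoApiUtil import cleanly.

# Illustrative only; model id and prompt are placeholders.
from Util.GoApiUtil import ModelInteractor

interactor = ModelInteractor()  # defaults to GPTNB_API_KEY and the GoAPI chat-completions URL
interactor.stream_request(
    model="gemini-2.5-pro",     # placeholder model name, replace with one your endpoint serves
    prompt="Introduce yourself in one sentence.",
    temperature=0.7
)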