Skip to content

Commit 7641f33

Browse files
committed
- patch : Fail to generate response. Resolves #15. Resolves #14
- patch : Drop support for websocket.
1 parent 7451be9 commit 7641f33

File tree

7 files changed

+67
-131
lines changed

7 files changed

+67
-131
lines changed

WebChatGPT/errors.py

-4
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,3 @@
1-
class WebSocketError(Exception):
2-
pass
3-
4-
51
class CookieExpiredError(Exception):
62
pass
73

WebChatGPT/main.py

+54-117
Original file line numberDiff line numberDiff line change
@@ -1,69 +1,10 @@
11
#!/usr/bin/python
22
import requests
33
from WebChatGPT import utils
4-
import logging
54
import json
65
import re
76
from functools import lru_cache
8-
import websocket
9-
from base64 import b64decode
10-
from WebChatGPT.errors import WebSocketError
11-
from threading import Thread as thr
127
from typing import Iterator
13-
from .errors import MaximumRetrialError
14-
15-
16-
class Websocket:
17-
18-
def __init__(
19-
self,
20-
data: dict,
21-
chatgpt: object,
22-
trace: bool = False,
23-
):
24-
chatgpt.socket_closed = False
25-
chatgpt.loading_chunk = ""
26-
self.payload = data.copy()
27-
self.url = data.get("wss_url")
28-
self.payload.pop("wss_url")
29-
self.chatgpt = chatgpt
30-
self.last_response_chunk: dict = {}
31-
self.last_response_undecoded_chunk: dict = {}
32-
websocket.enableTrace(trace)
33-
34-
def on_message(self, ws, message):
35-
response = json.loads(message)
36-
self.chatgpt.last_response_undecoded_chunk = response
37-
decoded_body = b64decode(response["body"]).decode("utf-8")
38-
response["body"] = decoded_body
39-
self.chatgpt.last_response_chunk = response
40-
self.chatgpt.loading_chunk = decoded_body
41-
42-
def on_error(self, ws, error):
43-
self.on_close("ws")
44-
raise WebSocketError(error)
45-
46-
def on_close(self, ws, *args, **kwargs):
47-
self.chatgpt.socket_closed = True
48-
49-
def on_open(
50-
self,
51-
ws,
52-
):
53-
json_data = json.dumps(self.payload, indent=4)
54-
ws.send(json_data)
55-
56-
def run(
57-
self,
58-
):
59-
ws = websocket.WebSocketApp(
60-
self.url,
61-
on_message=self.on_message,
62-
on_error=self.on_error,
63-
on_close=self.on_close,
64-
on_open=self.on_open,
65-
)
66-
ws.run_forever(origin="https://chat.openai.com")
678

689

6910
class ChatGPT:
@@ -127,6 +68,9 @@ def __init__(
12768
self.stop_sharing_conversation_endpoint = (
12869
"https://chat.openai.com/backend-api/%(share_id)s"
12970
)
71+
self.sentinel_chat_requirements_endpoint: str = (
72+
"https://chat.openai.com/backend-api/sentinel/chat-requirements"
73+
)
13074
self.session.headers["User-Agent"] = user_agent
13175
self.locale = locale
13276
self.model = model
@@ -139,12 +83,7 @@ def __init__(
13983
self.__already_init = False
14084
self.__index = conversation_index
14185
self.__title_cache = {}
142-
self.last_response_undecoded_chunk: str = ""
143-
self.last_response_chunk: dict = {}
144-
self.loading_chunk: str = ""
145-
self.socket_closed: bool = True
146-
self.trace = trace
147-
self.request_more_times: int = 2
86+
self.stream_chunk_size = 64
14887
# self.register_ws =self.session.post("https://chat.openai.com/backend-api/register-websocket")
14988
# Websocket(self.register_ws.json(),self).run()
15089

@@ -171,6 +110,13 @@ def current_conversation_id(self):
171110
def get_current_message_id(self):
172111
return self.last_response_metadata.get(2).get("message_id")
173112

113+
def update_sentinel_tokens(self):
114+
resp = self.session.post(self.sentinel_chat_requirements_endpoint, json={})
115+
resp.raise_for_status()
116+
self.session.headers.update(
117+
{"OpenAI-Sentinel-Chat-Requirements-Token": resp.json()["token"]}
118+
)
119+
174120
def ask(
175121
self,
176122
prompt: str,
@@ -228,32 +174,28 @@ def ask(
228174
}
229175
```
230176
"""
177+
self.update_sentinel_tokens()
231178
response = self.session.post(
232179
url=self.conversation_endpoint,
233180
json=self.__generate_payload(prompt),
234181
timeout=self.timeout,
235-
stream=False,
182+
stream=True,
236183
)
237-
response.raise_for_status()
238-
ws_payload = dict(response.json())
239-
self.__request_more_count: int = 0
240-
241-
# out = lambda v:print(json.dumps(dict(v), indent=4))
242-
# out(response.headers)
243-
def for_stream():
244-
245-
ws = Websocket(ws_payload, self, self.trace)
246-
t1 = thr(target=ws.run)
247-
t1.start()
248-
cached_loading_chunk = self.loading_chunk
249-
cached_last_response = self.last_response.copy()
250-
while True:
251-
if self.loading_chunk != cached_loading_chunk:
252-
# New chunk loaded
184+
# response.raise_for_status()
185+
if (
186+
response.ok
187+
and response.headers.get("content-type")
188+
== "text/event-stream; charset=utf-8"
189+
):
190+
191+
def for_stream():
192+
for value in response.iter_lines(
193+
decode_unicode=True,
194+
delimiter="data:",
195+
chunk_size=self.stream_chunk_size,
196+
):
253197
try:
254-
value = self.loading_chunk
255-
# print(value)
256-
to_dict = json.loads(value[5:])
198+
to_dict = json.loads(value)
257199
if "is_completion" in to_dict.keys():
258200
# Metadata (response)
259201
self.last_response_metadata[
@@ -269,40 +211,35 @@ def for_stream():
269211
yield value
270212
pass
271213

272-
finally:
273-
cached_loading_chunk = self.loading_chunk
274-
275-
if self.socket_closed:
276-
t1.join()
277-
break
278-
279-
if (
280-
self.last_response == cached_last_response
281-
or self.last_response["message"]["status"] != "finished_successfully"
282-
):
283-
284-
# print(json.dumps(self.last_response, indent=4))
285-
# print("Requesting more body")
286-
# print('=='*40)
287-
t1.join()
288-
if self.__request_more_count >= self.request_more_times:
289-
raise MaximumRetrialError(
290-
f"Failed to generate response after {self.request_more_times} attempts"
291-
)
292-
293-
for value in for_stream():
294-
yield value
295-
296-
self.__request_more_count += 1
297-
# else:
298-
# print(print(json.dumps(self.last_response_chunk, indent=4)))
214+
def for_non_stream():
215+
response_to_be_returned = {}
216+
for value in response.iter_lines(
217+
decode_unicode=True,
218+
delimiter="data:",
219+
chunk_size=self.stream_chunk_size,
220+
):
221+
try:
222+
to_dict = json.loads(value)
223+
if "is_completion" in to_dict.keys():
224+
# Metadata (response)
225+
self.last_response_metadata[
226+
2 if to_dict.get("is_completion") else 1
227+
] = to_dict
228+
continue
229+
# Only data containing the `feedback body` make it to here
230+
self.last_response.update(to_dict)
231+
response_to_be_returned.update(to_dict)
232+
except json.decoder.JSONDecodeError:
233+
# Caused by either empty string or [DONE]
234+
pass
235+
return response_to_be_returned
299236

300-
def for_non_stream():
301-
for _ in for_stream():
302-
pass
303-
return self.last_response
237+
return for_stream() if stream else for_non_stream()
304238

305-
return for_stream() if stream else for_non_stream()
239+
else:
240+
raise Exception(
241+
f"Failed to fetch response - ({response.status_code}, {response.reason} : {response.headers.get('content-type')} : {response.text}"
242+
)
306243

307244
def chat(self, prompt: str, stream: bool = False) -> str:
308245
"""Interact with ChatGPT on the fly

WebChatGPT/utils.py

+3-2
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
from datetime import datetime, timezone
22
import json
33
import logging
4-
import os
4+
import locale
55
from uuid import uuid4
66
from typing import Any
77
from .errors import CookieExpiredError
@@ -27,6 +27,7 @@
2727
"Sec-Fetch-Mode": "cors",
2828
"Sec-Fetch-Site": "same-origin",
2929
"User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:120.0) Gecko/20100101 Firefox/120.0",
30+
"OAI-Language": locale.getlocale()[0].replace("_", "-"),
3031
}
3132

3233
response_example = {
@@ -238,7 +239,7 @@ def generate_payload(self: object, prompt: str) -> dict:
238239
"timezone_offset_min": -180,
239240
"suggestions": [],
240241
"history_and_training_disabled": self.disable_history_and_training,
241-
"arkose_token": None,
242+
# "arkose_token": None,
242243
"conversation_mode": {"kind": "primary_assistant"},
243244
"force_paragen": False,
244245
"force_rate_limit": False,

docs/CHANGELOG.md

+8-1
Original file line numberDiff line numberDiff line change
@@ -159,4 +159,11 @@ More console chat manipulation features.
159159

160160
**What's new?**
161161

162-
- patch: Independencies
162+
- patch: Independencies
163+
164+
## v0.3.0
165+
166+
**What's new?**
167+
168+
- patch : Fail to generate response. Resolves #15. Resolves #14
169+
- patch : Drop support for websocket.

docs/README.md

-3
Original file line numberDiff line numberDiff line change
@@ -26,9 +26,6 @@
2626

2727
Unlike the [official Openai library](https://github.com/openai/openai-python), this library makes REST-API calls to [ChatGPT](https://chat.openai.com) via the **browser** endpoints. *No API-KEY required*
2828

29-
> [!CAUTION]
30-
> **Currently** very unreliable!
31-
3229
```python
3330
from WebChatGPT import ChatGPT
3431
bot = ChatGPT(

requirements.txt

+1-2
Original file line numberDiff line numberDiff line change
@@ -3,5 +3,4 @@ python-dotenv==1.0.0
33
click==8.1.3
44
rich==13.3.4
55
clipman==3.1.0
6-
pyperclip==1.8.2
7-
websocket-client==1.7.0
6+
pyperclip==1.8.2

setup.py

+1-2
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212

1313
setup(
1414
name="webchatgpt",
15-
version="0.2.9",
15+
version="0.3.0",
1616
license="GNU v3",
1717
author="Smartwa",
1818
maintainer="Smartwa",
@@ -41,7 +41,6 @@
4141
"rich==13.3.4",
4242
"clipman==3.1.0",
4343
"pyperclip==1.8.2",
44-
"websocket-client==1.7.0",
4544
],
4645
python_requires=">=3.10",
4746
keywords=[

0 commit comments

Comments
 (0)