client.ts (forked from RomanHotsiy/commitgpt)
// Adapted from: https://github.com/wong2/chat-gpt-google-extension/blob/main/background/index.mjs
import { createParser } from 'eventsource-parser';
import { v4 as uuidv4 } from 'uuid';
import ExpiryMap from 'expiry-map';
import fetch, { Response } from 'node-fetch';
import { OpenAIApi, Configuration } from 'openai';
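// Official OpenAI API client, configured from the OPENAI_API_KEY environment variable.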
const configuration = new Configuration({
apiKey: process.env.OPENAI_API_KEY,
});
const openai = new OpenAIApi(configuration);
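// `sessionToken` is the ChatGPT web session cookie (`__Secure-next-auth.session-token`)
// taken from a logged-in chat.openai.com browser session.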
export type ClientConfig = {
sessionToken: string;
};
const KEY_ACCESS_TOKEN = 'accessToken';
const USER_AGENT =
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36';
const cache = new ExpiryMap(10 * 1000);
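// Exchanges the ChatGPT session cookie for an access token via the chat.openai.com
// session endpoint; results are cached in the 10-second ExpiryMap above so repeated
// calls do not re-hit the endpoint.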
export async function refreshAccessToken(sessionToken: string) {
if (cache.get(KEY_ACCESS_TOKEN)) {
return cache.get(KEY_ACCESS_TOKEN);
}
const resp = await fetch('https://chat.openai.com/api/auth/session', {
headers: {
'User-Agent': USER_AGENT,
cookie: '__Secure-next-auth.session-token=' + sessionToken,
},
})
.then(r => r.json() as any)
.catch(() => ({}));
if (!resp.accessToken) {
throw new Error('Unauthorized');
}
cache.set(KEY_ACCESS_TOKEN, resp.accessToken);
return resp.accessToken;
}
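// Wraps the session check and a single question/answer round trip. Each instance gets
// a random conversation id, used as parent_message_id by the commented-out ChatGPT
// backend call in getAnswer.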
export class ChatGPTClient {
constructor(public config: ClientConfig, public conversationId: string = uuidv4()) {}
async ensureAuth() {
await refreshAccessToken(this.config.sessionToken);
}
  async getAnswer(question: string): Promise<string> {
    // Refreshing the access token validates the session token; the token itself
    // is only needed by the commented-out streaming call kept below for reference.
    await refreshAccessToken(this.config.sessionToken);
    // Ask the official completions API instead of the ChatGPT web backend.
    // NOTE: 'text-davinci-002-render' is ChatGPT's internal model and is not
    // exposed by the public completions API, so a public model is used here.
    const completion = await openai.createCompletion({
      model: 'text-davinci-003',
      prompt: question,
      max_tokens: 2048,
      temperature: 0.5,
    });
    return completion.data.choices[0]?.text?.trim() ?? '';
// fetchSSE('https://chat.openai.com/backend-api/conversation', {
// method: 'POST',
// headers: {
// 'User-Agent': USER_AGENT,
// 'Content-Type': 'application/json',
// Authorization: `Bearer ${accessToken}`,
// },
// body: JSON.stringify({
// action: 'next',
// messages: [
// {
// id: uuidv4(),
// role: 'user',
// content: {
// content_type: 'text',
// parts: [question],
// },
// },
// ],
// model: 'text-davinci-002-render',
// parent_message_id: this.conversationId,
// }),
// onMessage: (message: string) => {
// if (message === '[DONE]') {
// return resolve(response);
// }
// const data = JSON.parse(message);
// const text = data.message?.content?.parts?.[0];
// if (text) {
// response = text;
// }
// },
// }).catch(reject);
  }
}
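// Minimal server-sent-events helper on top of node-fetch: feeds the response body
// through eventsource-parser and forwards each event's data payload to onMessage.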
async function fetchSSE(
  resource: string,
  options: { onMessage: (message: string) => void } & Record<string, any>
) {
  const { onMessage, ...fetchOptions } = options;
  const resp: Response = await fetch(resource, fetchOptions);
if (!resp.ok) {
const err = new Error(resp.statusText);
(err as any).details = await resp.text(); // quick hack to persist the error details
throw err;
}
const parser = createParser(event => {
if (event.type === 'event') {
onMessage(event.data);
}
});
resp.body.on('readable', () => {
let chunk;
while (null !== (chunk = resp.body.read())) {
parser.feed(chunk.toString());
}
});
}
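// Hypothetical usage sketch (the real calling code lives elsewhere in the repo;
// the env var name below is an assumption, not part of this file):
//
//   const client = new ChatGPTClient({ sessionToken: process.env.CHATGPT_SESSION_TOKEN! });
//   await client.ensureAuth(); // throws 'Unauthorized' if the session token is stale
//   const answer = await client.getAnswer('Suggest a commit message for these changes.');
//   console.log(answer);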