@@ -7,6 +7,8 @@ import { pushRecord, setAbortController } from './shared.mjs'
 import Browser from 'webextension-polyfill'
 import { v4 as uuidv4 } from 'uuid'
 import { t } from 'i18next'
+import { sha3_512 } from 'js-sha3'
+import randomInt from 'random-int'

 async function request(token, method, path, data) {
   const apiUrl = (await getUserConfig()).customChatGptWebApiUrl
@@ -49,12 +51,12 @@ export async function getModels(token) {
   if (response.models) return response.models.map((m) => m.slug)
 }

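+// Return the whole /sentinel/chat-requirements response (the sentinel token plus the
+// arkose and proofofwork requirement objects consumed below) rather than only the token.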
-export async function getRequirementsToken(accessToken) {
+export async function getRequirements(accessToken) {
   const response = JSON.parse(
     (await request(accessToken, 'POST', '/sentinel/chat-requirements')).responseText,
   )
-  if (response.token) {
-    return response.token
+  if (response) {
+    return response
   }
 }

@@ -91,6 +93,38 @@ export async function getArkoseToken(config) {
   return arkoseToken
 }

+// https://github.com/tctien342/chatgpt-proxy/blob/9147a4345b34eece20681f257fd475a8a2c81171/src/openai.ts#L103
+function generateProofToken(seed, diff, userAgent) {
+  const cores = [8, 12, 16, 24]
+  const screens = [3000, 4000, 6000]
+
+  const core = cores[randomInt(0, cores.length)]
+  const screen = screens[randomInt(0, screens.length)]
+
+  const parseTime = new Date().toString()
+
+  const config = [core + screen, parseTime, 4294705152, 0, userAgent]
+
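+  // config is the payload that gets hashed: [core+screen "hardware" number, timestamp string,
+  // fixed constant, nonce slot (overwritten each iteration), user agent]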
+  const diffLen = diff.length / 2
+
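+  // Proof-of-work search: try nonces until the SHA3-512 hex digest of seed + base64(config)
+  // has a prefix that compares less than or equal to the server-provided difficulty string.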
+  for (let i = 0; i < 100000; i++) {
+    config[3] = i
+    const jsonData = JSON.stringify(config)
+    // eslint-disable-next-line no-undef
+    const base = Buffer.from(jsonData).toString('base64')
+    const hashValue = sha3_512.create().update(seed + base)
+
+    if (hashValue.hex().substring(0, diffLen) <= diff) {
+      const result = 'gAAAAAB' + base
+      return result
+    }
+  }
+
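+  // No qualifying nonce found within the iteration budget: fall back to a token that only
+  // base64-encodes the quoted seed.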
+  // eslint-disable-next-line no-undef
+  const fallbackBase = Buffer.from(`"${seed}"`).toString('base64')
+  return 'gAAAAABwQ8Lk5FbGpA2NcR9dShT6gYjU7VxZ4D' + fallbackBase
+}
+
 export async function isNeedWebsocket(accessToken) {
   return (await request(accessToken, 'GET', '/accounts/check/v4-2023-04-27')).responseText.includes(
     'shared_websocket',
@@ -167,9 +201,9 @@ export async function generateAnswersWithChatgptWebApi(port, question, session,

   const config = await getUserConfig()
   let arkoseError
-  const [models, requirementsToken, arkoseToken, useWebsocket] = await Promise.all([
+  const [models, requirements, arkoseToken, useWebsocket] = await Promise.all([
     getModels(accessToken).catch(() => undefined),
-    getRequirementsToken(accessToken).catch(() => undefined),
+    getRequirements(accessToken).catch(() => undefined),
     getArkoseToken(config).catch((e) => {
       arkoseError = e
     }),
@@ -180,9 +214,17 @@ export async function generateAnswersWithChatgptWebApi(port, question, session,
   const usedModel =
     models && models.includes(selectedModel) ? selectedModel : Models[chatgptWebModelKeys[0]].value
   console.debug('usedModel', usedModel)
-  const needArkoseToken = !usedModel.includes(Models[chatgptWebModelKeys[0]].value)
+  const needArkoseToken = requirements && requirements.arkose?.required
   if (arkoseError && needArkoseToken) throw arkoseError

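+  // Solve the sentinel proof-of-work challenge when the requirements response asks for one.
+  // (The userAgent argument of generateProofToken is not supplied here, so the hashed payload
+  // carries null in that slot.)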
+  let proofToken
+  if (requirements?.proofofwork?.required) {
+    proofToken = generateProofToken(
+      requirements.proofofwork.seed,
+      requirements.proofofwork.difficulty,
+    )
+  }
+
   let cookie
   let oaiDeviceId
   if (Browser.cookies && Browser.cookies.getAll) {
@@ -213,8 +255,9 @@ export async function generateAnswersWithChatgptWebApi(port, question, session,
      'Content-Type': 'application/json',
      Authorization: `Bearer ${accessToken}`,
      ...(cookie && { Cookie: cookie }),
-      'Openai-Sentinel-Arkose-Token': arkoseToken || '',
-      'Openai-Sentinel-Chat-Requirements-Token': requirementsToken || '',
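+      // Attach sentinel headers only when the corresponding requirement or token actually exists.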
+      ...(needArkoseToken && { 'Openai-Sentinel-Arkose-Token': arkoseToken }),
+      ...(requirements && { 'Openai-Sentinel-Chat-Requirements-Token': requirements.token }),
+      ...(proofToken && { 'Openai-Sentinel-Proof-Token': proofToken }),
      'Oai-Device-Id': oaiDeviceId,
      'Oai-Language': 'en-US',
    },