
Commit

upgrade chatgpt web (proofToken) (josStorer#677, josStorer#678, josSt…
josStorer committed May 2, 2024
1 parent 1fe4bef commit 77a14e2
Showing 3 changed files with 71 additions and 8 deletions.
18 changes: 18 additions & 0 deletions package-lock.json

Some generated files are not rendered by default.

2 changes: 2 additions & 0 deletions package.json
@@ -32,6 +32,7 @@
"gpt-3-encoder": "^1.1.4",
"graphql": "^16.6.0",
"i18next": "^22.4.15",
"js-sha3": "^0.9.3",
"jsonwebtoken": "8.5.1",
"katex": "^0.16.6",
"lodash-es": "^4.17.21",
@@ -40,6 +41,7 @@
"preact": "^10.13.2",
"process": "^0.11.10",
"prop-types": "^15.8.1",
"random-int": "^3.0.0",
"react": "npm:@preact/compat@^17.1.2",
"react-bootstrap-icons": "^1.10.3",
"react-dom": "npm:@preact/compat@^17.1.2",
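Both additions are consumed by the proof-of-work helper introduced in chatgpt-web.mjs below: js-sha3 provides the SHA3-512 hash and random-int draws the randomized hardware-profile values. A minimal sketch of the two APIs as this commit uses them (the inputs are illustrative only, not taken from the commit):

    import { sha3_512 } from 'js-sha3'
    import randomInt from 'random-int'

    // One-shot hex digest, or an incremental hasher (the commit uses the incremental form)
    sha3_512('seed' + 'payload')                        // 128-character hex string
    sha3_512.create().update('seed' + 'payload').hex()  // same digest, built incrementally

    // Random integer drawn from the inclusive range [min, max]
    randomInt(0, 3) // 0, 1, 2 or 3

Because random-int's upper bound is inclusive, randomInt(0, cores.length) in the helper below appears able to return an index one past the end of the array.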
59 changes: 51 additions & 8 deletions src/services/apis/chatgpt-web.mjs
@@ -7,6 +7,8 @@ import { pushRecord, setAbortController } from './shared.mjs'
import Browser from 'webextension-polyfill'
import { v4 as uuidv4 } from 'uuid'
import { t } from 'i18next'
+ import { sha3_512 } from 'js-sha3'
+ import randomInt from 'random-int'

async function request(token, method, path, data) {
const apiUrl = (await getUserConfig()).customChatGptWebApiUrl
@@ -49,12 +51,12 @@ export async function getModels(token) {
if (response.models) return response.models.map((m) => m.slug)
}

- export async function getRequirementsToken(accessToken) {
+ export async function getRequirements(accessToken) {
const response = JSON.parse(
(await request(accessToken, 'POST', '/sentinel/chat-requirements')).responseText,
)
- if (response.token) {
- return response.token
+ if (response) {
+ return response
}
}

@@ -91,6 +93,38 @@ export async function getArkoseToken(config) {
return arkoseToken
}

+ // https://github.com/tctien342/chatgpt-proxy/blob/9147a4345b34eece20681f257fd475a8a2c81171/src/openai.ts#L103
+ function generateProofToken(seed, diff, userAgent) {
+ const cores = [8, 12, 16, 24]
+ const screens = [3000, 4000, 6000]
+
+ const core = cores[randomInt(0, cores.length)]
+ const screen = screens[randomInt(0, screens.length)]
+
+ const parseTime = new Date().toString()
+
+ const config = [core + screen, parseTime, 4294705152, 0, userAgent]
+
+ const diffLen = diff.length / 2
+
+ for (let i = 0; i < 100000; i++) {
+ config[3] = i
+ const jsonData = JSON.stringify(config)
+ // eslint-disable-next-line no-undef
+ const base = Buffer.from(jsonData).toString('base64')
+ const hashValue = sha3_512.create().update(seed + base)
+
+ if (hashValue.hex().substring(0, diffLen) <= diff) {
+ const result = 'gAAAAAB' + base
+ return result
+ }
+ }
+
+ // eslint-disable-next-line no-undef
+ const fallbackBase = Buffer.from(`"${seed}"`).toString('base64')
+ return 'gAAAAABwQ8Lk5FbGpA2NcR9dShT6gYjU7VxZ4D' + fallbackBase
+ }

export async function isNeedWebsocket(accessToken) {
return (await request(accessToken, 'GET', '/accounts/check/v4-2023-04-27')).responseText.includes(
'shared_websocket',
@@ -167,9 +201,9 @@ export async function generateAnswersWithChatgptWebApi(port, question, session,

const config = await getUserConfig()
let arkoseError
- const [models, requirementsToken, arkoseToken, useWebsocket] = await Promise.all([
+ const [models, requirements, arkoseToken, useWebsocket] = await Promise.all([
getModels(accessToken).catch(() => undefined),
- getRequirementsToken(accessToken).catch(() => undefined),
+ getRequirements(accessToken).catch(() => undefined),
getArkoseToken(config).catch((e) => {
arkoseError = e
}),
@@ -180,9 +214,17 @@
const usedModel =
models && models.includes(selectedModel) ? selectedModel : Models[chatgptWebModelKeys[0]].value
console.debug('usedModel', usedModel)
- const needArkoseToken = !usedModel.includes(Models[chatgptWebModelKeys[0]].value)
+ const needArkoseToken = requirements && requirements.arkose?.required
if (arkoseError && needArkoseToken) throw arkoseError

+ let proofToken
+ if (requirements?.proofofwork?.required) {
+ proofToken = generateProofToken(
+ requirements.proofofwork.seed,
+ requirements.proofofwork.difficulty,
+ )
+ }

let cookie
let oaiDeviceId
if (Browser.cookies && Browser.cookies.getAll) {
@@ -213,8 +255,9 @@ export async function generateAnswersWithChatgptWebApi(port, question, session,
'Content-Type': 'application/json',
Authorization: `Bearer ${accessToken}`,
...(cookie && { Cookie: cookie }),
- 'Openai-Sentinel-Arkose-Token': arkoseToken || '',
- 'Openai-Sentinel-Chat-Requirements-Token': requirementsToken || '',
+ ...(needArkoseToken && { 'Openai-Sentinel-Arkose-Token': arkoseToken }),
+ ...(requirements && { 'Openai-Sentinel-Chat-Requirements-Token': requirements.token }),
+ ...(proofToken && { 'Openai-Sentinel-Proof-Token': proofToken }),
'Oai-Device-Id': oaiDeviceId,
'Oai-Language': 'en-US',
},
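Putting the pieces together: getRequirements now returns the whole response of POST /sentinel/chat-requirements rather than just its token. If the response asks for proof-of-work, generateProofToken brute-forces the counter in config[3] until the first difficulty.length / 2 hex characters of sha3_512(seed + base64(JSON config)) compare lexicographically at or below the difficulty string; the token is 'gAAAAAB' plus that base64 payload, with a hard-coded fallback after 100000 attempts. The condensed sketch below is a reading of the diff above; buildSentinelHeaders is a name made up here for illustration, since the diff inlines this logic in generateAnswersWithChatgptWebApi, and the Arkose token, websocket check, cookies and error handling are omitted:

    // Condensed sketch of the new sentinel handling, assuming the helpers defined in this diff.
    async function buildSentinelHeaders(accessToken) {
      const requirements = await getRequirements(accessToken).catch(() => undefined)

      let proofToken
      if (requirements?.proofofwork?.required) {
        // seed and difficulty come from the /sentinel/chat-requirements response;
        // as committed, generateProofToken is called without a userAgent, so that
        // slot of the hashed config array serializes to null
        proofToken = generateProofToken(
          requirements.proofofwork.seed,
          requirements.proofofwork.difficulty,
        )
      }

      return {
        Authorization: `Bearer ${accessToken}`,
        ...(requirements && { 'Openai-Sentinel-Chat-Requirements-Token': requirements.token }),
        ...(proofToken && { 'Openai-Sentinel-Proof-Token': proofToken }),
      }
    }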
