Skip to content
This repository has been archived by the owner on Jan 22, 2024. It is now read-only.

Commit

Permalink
增加对claude和bing的支持
Browse files Browse the repository at this point in the history
  • Loading branch information
XiaoXinYo committed Jan 14, 2024
1 parent cc61936 commit 8c8253b
Show file tree
Hide file tree
Showing 9 changed files with 317 additions and 66 deletions.
12 changes: 5 additions & 7 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,22 +1,20 @@
## 提示
1. 升级咯,此仓库前身为Bing-Chat,现已更名为Chat-WebAPI,支持多种Chat,现已支持Bard,ChatGPT,文心一言.
2. 若报错,请先将bardapi,requests,easy-ernie更新到最新版本.
3. 已知文心一言有封号风险.
4. Bing Chat已被暂时移除.
1. 升级咯,此仓库前身为Bing-Chat,现已更名为Chat-WebAPI,支持多种Chat,现已支持Bard,Bing,ChatGPT,Claude,文心一言.
2. 已知Bing,文心一言有封号风险.
---
![Release](https://img.shields.io/badge/Release-0.1.7-blue)
![Release](https://img.shields.io/badge/Release-0.1.8-blue)
---
## 介绍
一款基于Python-FastAPI框架开发的多种Chat WebAPI程序.
## 需求
1. 平台: Windows/Linux/Docker.
2. 语言: Python3.8+.
3. 其他: Bard账户,ChatGPT密钥,文心一言账户.
3. 其他: Bard账户,Bing账户,ChatGPT密钥,Claude账户,文心一言账户.
## 配置
查看config.py文件.
## Cookie
1. 浏览器安装Cookie-Editor扩展.
2. 访问[Bard](https://bard.google.com)/[文心一言](https://yiyan.baidu.com).
2. 访问[Bard](https://bard.google.com/)/[Bing](https://www.bing.com/chat)/[Claude](https://claude.ai/)/[文心一言](https://yiyan.baidu.com/).
3. 在页面中点击扩展.
4. 点击扩展右下角的Export-Export as JSON.
5. 将复制的内容粘贴到对应的Cookie文件(cookie/)中.
Expand Down
33 changes: 33 additions & 0 deletions doc/wiki/Claude.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
## Ask
### 请求
1. 网址: /claude/ask.
2. 方式: GET/POST.
3. 格式: JSON(当方式为POST时).
4. 参数:

名称|类型|必填|说明
---|---|---|---
question|String|是|
token|String|否|填则为连续对话,不填则为新对话,值可在响应中获取
### 响应
1. 格式: JSON.
2. 参数:

名称|类型|说明
---|---|---
code|Integer|
message|String|
data|Object|
answer|String|
token|String|用于连续对话
3. 示例:
```json
{
"code": 200,
"message": "success",
"data": {
"answer": "你好,我是Claude。",
"token": "3153d67b-eac2-457e-a2ee-fedc8ba53588"
}
}
```
41 changes: 24 additions & 17 deletions main.py
Original file line number Diff line number Diff line change
@@ -1,47 +1,54 @@
from fastapi import FastAPI, Request, Response
from fastapi.middleware.cors import CORSMiddleware
from view import bard, chatgpt, ernie
from contextlib import asynccontextmanager
from view import bard, bing, chatgpt, claude, ernie
from module import chat, core
import asyncio
import config
import uvicorn

APP = FastAPI()
@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan handler.

    On startup, launch the background session-eviction loop (chat.check);
    on shutdown, stop the loop and run one final non-looping cleanup pass.
    """
    # Keep a reference on app.state: asyncio holds only a weak reference to
    # tasks, so an unreferenced create_task() result may be garbage-collected
    # and the eviction loop would silently stop.
    app.state.chatCheckTask = asyncio.create_task(chat.check())
    yield
    # Cancel the periodic loop, then close every remaining session once.
    app.state.chatCheckTask.cancel()
    await chat.check(loop=False)

APP = FastAPI(lifespan=lifespan)
APP.add_middleware(
CORSMiddleware,
allow_origins=['*'],
allow_credentials=True,
allow_methods=['*'],
allow_headers=['*'],
)
# Mount one sub-router per chat backend under its own URL prefix.
# (The pre-0.1.8 name bard.Bard_APP was renamed to bard.BARD_APP.)
APP.include_router(bard.BARD_APP, prefix='/bard')
APP.include_router(bing.BING_APP, prefix='/bing')
APP.include_router(chatgpt.CHATGPT_APP, prefix='/chatgpt')
APP.include_router(claude.CLAUDE_APP, prefix='/claude')
APP.include_router(ernie.ERNIE_APP, prefix='/ernie')

@APP.on_event('startup')
async def startup() -> None:
asyncio.create_task(chat.check())

@APP.on_event('shutdown')
async def shutdown() -> None:
asyncio.create_task(chat.check(loop=False))

@APP.middleware('http')
async def middleware(request: Request, call_next) -> None:
urls = request.url.path.split('/')
if len(urls) == 3:
model = urls[1]
mode = urls[2]
if mode == 'ask':
generate = lambda model_: core.GenerateResponse().error(100, f'{model_}未配置')
generate = lambda model: core.GenerateResponse().error(100, f'{model}未配置')
else:
generate = lambda model_: core.GenerateResponse().error(100, f'{model_}未配置', streamResponse=True)
generate = lambda model: core.GenerateResponse().error(100, f'{model}未配置', streamResponse=True)
if model == 'bard':
if not chat.BARD_COOKIE:
return generate('Bard')
elif model == 'bing':
if not chat.BING_COOKIE:
return generate('Bing')
elif model == 'chatgpt':
if not config.CHATGPT_KEY:
return generate('ChatGPT')
elif model == 'claude':
if not chat.CLAUDE_COOKIE:
return generate('Claude')
elif model == 'ernie':
if not chat.ERNIE_COOKIE:
return generate('文心一言')
Expand All @@ -59,10 +66,10 @@ def error500(request: Request, exc: Exception) -> Response:

if __name__ == '__main__':
    # Run the app with uvicorn directly; host/port and optional TLS
    # settings all come from the HTTP section of config.py.
    appConfig = {
        'host': config.HTTP['host'],
        'port': config.HTTP['port']
    }
    if config.HTTP['ssl']['enable']:
        uvicorn.run(APP, **appConfig, ssl_keyfile=config.HTTP['ssl']['keyPath'], ssl_certfile=config.HTTP['ssl']['certPath'])
    else:
        uvicorn.run(APP, **appConfig)
91 changes: 58 additions & 33 deletions module/chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,74 +3,99 @@
from module import chatgpt
import enum
import bardapi
import re_edge_gpt
import claude2_api
import easy_ernie
import config
import asyncio
import uuid
import json
import dataclasses

CHAT = {}
CHATS = []

BARD_COOKIE = auxiliary.getCookie('./cookie/bard.json', all=True)
ERNIE_COOKIE = auxiliary.getCookie('./cookie/ernie.json', names=['BAIDUID', 'BDUSS_BFESS'])
BARD_COOKIE = auxiliary.getCookie('./cookie/bard.json')
with open('./cookie/bing.json', 'r') as file:
BING_COOKIE = json.load(file)
CLAUDE_COOKIE = auxiliary.getCookie('./cookie/claude.json', dict_=False)
ERNIE_COOKIE = auxiliary.getCookie('./cookie/ernie.json')

class Type(enum.Enum):
    # Supported chat backends; each value is the display name used when
    # reporting "<name>未配置" style errors to clients.
    BARD = 'Bard'
    BING = 'Bing'
    CHATGPT = 'ChatGPT'
    CLAUDE = 'Claude'
    ERNIE = 'Ernie'

@dataclasses.dataclass
class Chat:
    """A live chat session stored in the module-level CHATS registry."""
    type_: Type      # backend kind (Bard/Bing/ChatGPT/Claude/Ernie)
    token: str       # uuid4 string returned to the API caller for continuity
    bot: object      # backend-specific client instance
    parameter: dict  # extra per-session state (e.g. Claude's chatId)
    timestamp: int   # last-used time; consumed by check() for eviction

async def generate(type_: Type, parameter: Optional[dict]=None) -> Optional[tuple]:
    """Create a new chat session for the given backend.

    Registers the session in CHATS and returns (token, bot), or None when
    type_ is not a supported backend.

    NOTE(review): the previous signature used a mutable default
    (parameter: dict={}) that is mutated below in the Claude branch
    (parameter['chatId'] = ...), leaking state between calls — replaced
    with the None-sentinel idiom; callers passing a dict are unaffected.
    """
    global CHATS
    if parameter is None:
        parameter = {}
    # host:port string for clients that take a plain proxy address.
    addressPortProxy = f'{config.PROXY["host"]}:{config.PROXY["port"]}' if config.PROXY['enable'] else None
    # requests-style proxy mapping for clients that take a dict.
    proxy = {
        'http': f'http://{addressPortProxy}/',
        'https': f'https://{addressPortProxy}/'
    } if config.PROXY['enable'] else None
    if type_ == Type.BARD:
        bot = bardapi.BardCookies(cookie_dict=BARD_COOKIE, proxies=proxy)
    elif type_ == Type.BING:
        bot = await re_edge_gpt.Chatbot.create(proxy=addressPortProxy, cookies=BING_COOKIE)
    elif type_ == Type.CHATGPT:
        bot = chatgpt.ChatGPT(config.CHATGPT_KEY, proxy=proxy)
    elif type_ == Type.CLAUDE:
        claudeSession = claude2_api.SessionData(CLAUDE_COOKIE, 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36 Edg/120.0.0.0')
        claudeProxy = claude2_api.client.HTTPProxy(
            config.PROXY['host'],
            config.PROXY['port'],
            use_ssl=config.PROXY['ssl']
        ) if config.PROXY['enable'] else None
        bot = claude2_api.ClaudeAPIClient(claudeSession, proxy=claudeProxy)
        # Claude needs a server-side conversation id kept per session.
        parameter['chatId'] = bot.create_chat()
    elif type_ == Type.ERNIE:
        bot = easy_ernie.FastErnie(ERNIE_COOKIE['BAIDUID'], ERNIE_COOKIE['BDUSS_BFESS'])
    else:
        return None
    token = str(uuid.uuid4())
    CHATS.append(Chat(type_, token, bot, parameter, auxiliary.getTimestamp()))
    return token, bot

def get(type_: Type, token: str) -> Optional[Chat]:
    """Look up an active chat session by backend type and token.

    Refreshes the session's last-used timestamp so actively used sessions
    are not evicted by the background check() loop — the pre-refactor
    dict-based version did this, and dropping it makes long conversations
    expire mid-use after TOKEN_USE_MAX_TIME_INTERVAL.
    """
    global CHATS
    for chat in CHATS:
        if chat.type_ == type_ and chat.token == token:
            chat.timestamp = auxiliary.getTimestamp()
            return chat
    return None

def update(token: str, parameter: dict) -> None:
    """Replace the stored parameter dict of the session identified by token.

    Silently does nothing when the token is unknown (e.g. already evicted).
    """
    global CHATS
    for chat in CHATS:
        if chat.token == token:
            chat.parameter = parameter
            break

async def check(loop=True) -> None:
global CHAT
global CHATS
while True:
for token in CHAT.copy():
chat = CHAT[token]
if auxiliary.getTimestamp() - chat.timestamp > config.TOKEN_USE_MAX_TIME_INTERVAL * 60:
if chat.type_ == Type.BARD:
await chat.bot.close()
elif chat.type_ == Type.ERNIE:
chat.bot.close()
del CHAT[token]
for chat in CHATS.copy():
if loop and auxiliary.getTimestamp() - chat.timestamp <= config.TOKEN_USE_MAX_TIME_INTERVAL * 60:
continue

if chat.type_ == Type.BARD:
await chat.bot.close()
elif chat.type_ == Type.BING:
chat.bot.close()
elif chat.type_ == Type.CLAUDE:
chat.bot.delete_chat(chat.parameter['chatId'])
elif chat.type_ == Type.ERNIE:
chat.bot.close()
CHATS.remove(chat)

if loop:
await asyncio.sleep(60)
else:
Expand Down
7 changes: 3 additions & 4 deletions module/chatgpt.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,16 +22,14 @@ def __init__(self, apiKey: str, apiUrl: str='https://api.openai.com', proxy: Opt

def checkJson(self, data: str) -> None:
    """Validate an API response body.

    Raises Exception when the body is not valid JSON, or when the decoded
    object carries an 'error' member (OpenAI-style error envelope).
    Returns None on success.
    """
    try:
        data = json.loads(data)
    # Narrowed from a bare `except:` (which also swallows KeyboardInterrupt
    # and SystemExit); json.loads raises ValueError/JSONDecodeError on bad
    # JSON and TypeError on a non-string input.
    except (ValueError, TypeError):
        raise Exception('请求失败,响应格式错误')

    if 'error' in data:
        raise Exception(f'请求失败,{data["error"]["message"]}')

def request(self, method: str, url: str, data: Optional[dict] = None, stream=False,
check=True) -> requests.Response:
def request(self, method: str, url: str, data: Optional[dict] = None, stream=False, check=True) -> requests.Response:
if method == 'get':
self.response = self.session.get(url, params=data, stream=stream)
else:
Expand Down Expand Up @@ -82,7 +80,8 @@ def askStream(self, model: str, question: str) -> Generator:
data = line[6:]
data = json.loads(data)
choices = data['choices'][0]
if answer := choices['delta'].get('content'):
answer = choices['delta'].get('content')
if answer:
fullAnswer += answer
yield {
'answer': answer,
Expand Down
8 changes: 4 additions & 4 deletions module/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,12 @@
import json

async def getRequestParameter(request: 'Request') -> dict:
    """Extract the caller's parameters from a FastAPI/Starlette request.

    GET  -> query-string parameters;
    POST -> JSON body (awaited);
    any other method -> empty dict.
    Always returns a plain dict copy.
    """
    result = {}
    if request.method == 'GET':
        result = request.query_params
    elif request.method == 'POST':
        result = await request.json()
    return dict(result)

class GenerateResponse:
TYPE = Union[str, Response]
Expand Down
3 changes: 2 additions & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
fastapi
uvicorn
gevent
gunicorn
asyncio
bardapi
re_edge_gpt
requests
unofficial-claude2-api
easy-ernie
Loading

0 comments on commit 8c8253b

Please sign in to comment.