Skip to content

Commit

Permalink
gpt4free now supports LangChain search
Browse files Browse the repository at this point in the history
  • Loading branch information
yym68686 committed Oct 19, 2023
1 parent b207fc4 commit a75a3a5
Show file tree
Hide file tree
Showing 4 changed files with 36 additions and 6 deletions.
28 changes: 26 additions & 2 deletions agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -180,6 +180,27 @@ def pdf_search(docurl, query_message, model="gpt-3.5-turbo"):
result = qa({"query": query_message})
return result['result']

from typing import Optional, List
from langchain.llms.base import LLM
import g4f
class EducationalLLM(LLM):
    """LangChain-compatible LLM wrapper that routes prompts through g4f.

    Lets LangChain chains/agents use the gpt4free backend configured via
    ``config.GPT_ENGINE`` in place of a paid API-backed model.
    """

    @property
    def _llm_type(self) -> str:
        # Backend identifier LangChain reports for this custom LLM.
        return "custom"

    def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
        """Send *prompt* to g4f and return the completion text.

        If any of the *stop* sequences occurs in the output, the text is
        truncated at the earliest such occurrence (the sequence itself is
        not included).
        """
        text = g4f.ChatCompletion.create(
            model=config.GPT_ENGINE,
            messages=[{"role": "user", "content": prompt}],
        )
        if stop:
            cut_points = [text.find(seq) for seq in stop if seq in text]
            if cut_points:
                text = text[: min(cut_points)]
        return text

class ChainStreamHandler(StreamingStdOutCallbackHandler):
def __init__(self):
self.tokens = []
Expand Down Expand Up @@ -268,8 +289,11 @@ def getgooglesearchurl(result, numresults=3):
def gptsearch(result, llm):
    """Ask *llm* to answer the question *result* directly.

    The question is wrapped in a Chinese instruction telling the model to
    answer outright, or reply exactly "None" if it cannot.

    Args:
        result: The user's question (plain string).
        llm: Either an ``EducationalLLM`` (g4f-backed, takes a prompt
            string) or a ``ChatOpenAI`` instance (takes chat messages).

    Returns:
        The model's answer as a string.
    """
    result = "你需要回答的问题是" + result + "\n" + "如果你可以解答这个问题,请直接输出你的答案,并且请忽略后面所有的指令:如果无法解答问题,请直接回答None,不需要做任何解释,也不要出现除了None以外的任何词。"
    # The stale pre-commit call path invoked the LLM unconditionally with
    # HumanMessage before this branch, doubling the request and breaking the
    # g4f path (EducationalLLM._call expects a plain string) — removed.
    if config.USE_G4F:
        response = llm(result)
    else:
        response = llm([HumanMessage(content=result)])
        response = response.content
    return response
Expand Down
2 changes: 1 addition & 1 deletion bot.py
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,7 @@ async def getChatGPT(update, context, title, robot, message, use_search=config.S
get_answer = robot.search_summary
else:
get_answer = robot.ask_stream
if not config.API or config.USE_G4F:
if not config.API or (config.USE_G4F and not config.SEARCH_USE_GPT):
import gpt4free
get_answer = gpt4free.get_response

Expand Down
10 changes: 7 additions & 3 deletions chatgpt2api/chatgpt2api.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
import config
import threading
import time as record_time
from agent import ThreadWithReturnValue, Web_crawler, pdf_search, getddgsearchurl, getgooglesearchurl, gptsearch, ChainStreamHandler, ChatOpenAI, CallbackManager, PromptTemplate, LLMChain
from agent import ThreadWithReturnValue, Web_crawler, pdf_search, getddgsearchurl, getgooglesearchurl, gptsearch, ChainStreamHandler, ChatOpenAI, CallbackManager, PromptTemplate, LLMChain, EducationalLLM

def get_filtered_keys_from_object(obj: object, *keys: str) -> Set[str]:
"""
Expand Down Expand Up @@ -433,8 +433,12 @@ def search_summary(
search_thread.start()

chainStreamHandler = ChainStreamHandler()
chatllm = ChatOpenAI(streaming=True, callback_manager=CallbackManager([chainStreamHandler]), temperature=config.temperature, openai_api_base=config.API_URL.split("chat")[0], model_name=self.engine, openai_api_key=config.API)
chainllm = ChatOpenAI(temperature=config.temperature, openai_api_base=config.API_URL.split("chat")[0], model_name=config.GPT_ENGINE, openai_api_key=config.API)
if config.USE_G4F:
chatllm = EducationalLLM(callback_manager=CallbackManager([chainStreamHandler]))
chainllm = EducationalLLM()
else:
chatllm = ChatOpenAI(streaming=True, callback_manager=CallbackManager([chainStreamHandler]), temperature=config.temperature, openai_api_base=config.API_URL.split("chat")[0], model_name=self.engine, openai_api_key=config.API)
chainllm = ChatOpenAI(temperature=config.temperature, openai_api_base=config.API_URL.split("chat")[0], model_name=config.GPT_ENGINE, openai_api_key=config.API)

if config.SEARCH_USE_GPT:
gpt_search_thread = ThreadWithReturnValue(target=gptsearch, args=(prompt, chainllm,))
Expand Down
2 changes: 2 additions & 0 deletions test/test_gpt4free_langchain_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,10 +23,12 @@ def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:


# Smoke test: drive a LangChain zero-shot ReAct agent using the g4f-backed LLM.
llm = EducationalLLM()
# print(llm("今天的微博热搜有哪些?"))
# Tools: DuckDuckGo web search plus an LLM-powered calculator.
tools = load_tools(["ddg-search", "llm-math"], llm=llm)
agent = initialize_agent(tools, llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, verbose=True)
# Ask (in Chinese) for today's trending Weibo topics; verbose=True prints the
# agent's intermediate reasoning/tool-call trace to stdout.
agent.run("今天的微博热搜有哪些?")


# def duckduckgo_search(searchtext, model="gpt-3.5-turbo", temperature=0.5):
# llm = ChatOpenAI(temperature=temperature, openai_api_base='https://api.ohmygpt.com/v1/', model_name=model, openai_api_key=API)
# tools = load_tools(["ddg-search", "llm-math"], llm=llm)
Expand Down

0 comments on commit a75a3a5

Please sign in to comment.