Langchain agent SerpAPI and Local LLM to search Web

Viewed 135 times

I have been struggling to get SerpAPI working with local LLMs. All the examples I found use OpenAI.

Here is how I tried to make it work. Can you please suggest a solution?

"""Self-ask-with-search agent driven by a local Hugging Face LLM + SerpAPI."""
from google.colab import userdata
from langchain.agents import AgentType, Tool, initialize_agent
from langchain.llms import HuggingFacePipeline
from langchain_community.utilities import SerpAPIWrapper

# Local LLM (swap model_id for any text-generation model).
# return_full_text=False makes the pipeline return only the generated
# continuation instead of prompt + continuation — the agent's output
# parser cannot find "Follow up:"/"So the final answer is:" markers when
# the whole prompt is echoed back. 60 new tokens is also far too few for
# a complete self-ask reasoning step, so give the model more headroom.
model_id = "microsoft/phi-2"
local_llm = HuggingFacePipeline.from_model_id(
    model_id=model_id,
    task="text-generation",
    pipeline_kwargs={"max_new_tokens": 256, "return_full_text": False},
)

# SerpAPI-backed search tool. The SELF_ASK_WITH_SEARCH agent requires
# exactly one tool and it must be named "Intermediate Answer".
serpapi_api_key = userdata.get('SERPAPI_API_KEY')
search = SerpAPIWrapper(serpapi_api_key=serpapi_api_key)
tools = [
    Tool(
        name="Intermediate Answer",
        func=search.run,
        description="useful for when you need to ask with search",
    )
]

self_ask_with_search = initialize_agent(
    tools, local_llm, agent=AgentType.SELF_ASK_WITH_SEARCH, verbose=True
)
self_ask_with_search.run(
    "What is the hometown of the reigning men's U.S. Open champion?"
)

And here is a different version:

"""Zero-shot ReAct agent driven by a local Hugging Face LLM + SerpAPI."""
from google.colab import userdata
from langchain.llms import HuggingFacePipeline
from langchain_community.utilities import SerpAPIWrapper
from langchain.agents import AgentType, initialize_agent, load_tools

# SerpAPI search tool; the key is read from Colab's secret store.
serpapi_api_key = userdata.get('SERPAPI_API_KEY')
tools = load_tools(["serpapi"], serpapi_api_key=serpapi_api_key)

# Local LLM. The original assigned model_id twice (the Mistral value was
# dead code, immediately overwritten), so only the effective value is kept.
# return_full_text=False returns just the generated continuation — the
# ReAct output parser fails when the prompt is echoed back — and 60 new
# tokens is too few for a full Thought/Action/Action Input step.
model_id = "microsoft/phi-2"
local_llm = HuggingFacePipeline.from_model_id(
    model_id=model_id,
    task="text-generation",
    pipeline_kwargs={"max_new_tokens": 256, "return_full_text": False},
)

# Use the AgentType enum (already imported) instead of its raw string value.
agent = initialize_agent(
    tools, local_llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, verbose=True
)
agent.run("who is ceo of Amazon")

This is the last error I got (the error traceback did not survive the page extraction — it is missing here):

0 answers so far.