feat: add LinkupTool, a tool for LangChain #1

Merged · 3 commits · Nov 25, 2024
7 changes: 6 additions & 1 deletion .pre-commit-config.yaml
@@ -9,8 +9,13 @@ repos:
- id: trailing-whitespace

- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.7.4
rev: v0.8.0
hooks:
- id: ruff
args: [--fix]
- id: ruff-format

- repo: https://github.com/gitleaks/gitleaks
rev: v8.21.2
hooks:
- id: gitleaks
5 changes: 2 additions & 3 deletions examples/1_rag.py
@@ -26,12 +26,11 @@
query: str = "What is Linkup, the new French AI startup?"
linkup_depth: Literal["standard", "deep"] = "standard"
linkup_api_key = None
open_ai_model: str = "gpt-4o-mini"
openai_model: str = "gpt-4o-mini"
openai_api_key = None

load_dotenv() # Load environment variables from .env file if there is one


retriever = LinkupRetriever(linkup_api_key=linkup_api_key, depth=linkup_depth)


@@ -59,7 +58,7 @@ def inspect_context(state: dict[str, Any]) -> dict[str, Any]:
Question: {question}
"""
prompt = ChatPromptTemplate.from_template(generation_prompt_template)
model = ChatOpenAI(model=open_ai_model, api_key=openai_api_key)
model = ChatOpenAI(model=openai_model, api_key=openai_api_key)


chain: Runnable[Any, str] = (
42 changes: 42 additions & 0 deletions examples/2_search_agent.py
@@ -0,0 +1,42 @@
"""Simple agent example using the Linkup API and LangChain's agent framework.

This example is adapted from:
https://python.langchain.com/docs/tutorials/agents/

For this example to work, you need a few additional dependencies, all specified in the
`requirements-dev.txt` file (you can run `pip install -r requirements-dev.txt` to install them).

Additionally, you need an API key for Linkup and another one for OpenAI (for the base agent model).
You can set them manually as the LINKUP_API_KEY and OPENAI_API_KEY environment variables, duplicate
the `.env.example` file into a `.env` file and fill in the missing values (they will be loaded
automatically), or set the corresponding variables below.
"""

from typing import Literal

from dotenv import load_dotenv
from langchain_core.messages import HumanMessage
from langchain_openai import ChatOpenAI
from langgraph.prebuilt import create_react_agent

from langchain_linkup import LinkupTool

# You can change the search query and parameters here. If you prefer not to use environment
# variables, you can fill them in here.
query: str = "What's the weather like in Paris, London and Berlin?"
linkup_depth: Literal["standard", "deep"] = "standard"
linkup_api_key = None
openai_model: str = "gpt-4o-mini"
openai_api_key = None

load_dotenv() # Load environment variables from .env file if there is one

model = ChatOpenAI(model=openai_model, api_key=openai_api_key)
search_tool = LinkupTool(depth=linkup_depth, output_type="searchResults", linkup_api_key=linkup_api_key)
agent_executor = create_react_agent(model=model, tools=[search_tool])

# Use the agent
for chunk in agent_executor.stream(input=dict(messages=[HumanMessage(content=query)])):
print(chunk)
print("----")
2 changes: 2 additions & 0 deletions langchain_linkup/__init__.py
@@ -1,5 +1,7 @@
from .retriever import LinkupRetriever
from .tool import LinkupTool

__all__ = [
"LinkupRetriever",
"LinkupTool",
]
5 changes: 0 additions & 5 deletions langchain_linkup/retriever.py
@@ -16,11 +16,6 @@ class LinkupRetriever(BaseRetriever):
the Linkup API sources, that is the web and the Linkup Premium Partner sources.
"""

# NOTE: we could want to make the LinkupClient a class attribute, but we would need to make it
# serializable for this to work, as langchain_core.retrievers.BaseRetriever inherits from
# langchain_core.load.Serializable. There's no real overhead in creating a new LinkupClient
# instance for each query, so we don't need to do this at the moment.

depth: Literal["standard", "deep"]
"""The depth of the search. Can be either "standard", for a straighforward and fast search, or
"deep" for a more powerful agentic workflow."""
72 changes: 72 additions & 0 deletions langchain_linkup/tool.py
@@ -0,0 +1,72 @@
from typing import Any, Literal, Optional, Type, Union

from langchain_core.callbacks import (
AsyncCallbackManagerForToolRun,
CallbackManagerForToolRun,
)
from langchain_core.tools import BaseTool
from linkup import LinkupClient
from pydantic import BaseModel, Field


class LinkupInput(BaseModel):
query: str = Field(description="The search query.")


class LinkupTool(BaseTool):
"""A tool to query the Linkup API in agentic workflows.

This tool is a wrapper around the Linkup API, allowing you to search for documents from the
Linkup API sources, that is, the web and the Linkup Premium Partner sources.
"""

depth: Literal["standard", "deep"]
"""The depth of the search. Can be either "standard", for a straighforward and
fast search, or "deep" for a more powerful agentic workflow."""
output_type: Literal["searchResults", "sourcedAnswer", "structured"]
"""The type of output which is expected: "searchResults" will output raw
search results, "sourcedAnswer" will output the answer to the query and sources
supporting it, and "structured" will base the output on the format provided in
structured_output_schema."""
linkup_api_key: Optional[str] = None
"""The API key for the Linkup API. If None, the API key will be read from the environment
variable `LINKUP_API_KEY`."""
structured_output_schema: Union[Type[BaseModel], str, None] = None
"""If output_type is "structured", specify the schema of the
output. Supported formats are a pydantic.BaseModel or a string representing a
valid object JSON schema."""

# Fields used by the agent to describe how to use the tool under the hood
name: str = "linkup"
description: str = (
"A tool to search for information using the Linkup API, that is from the "
"web and Linkup Premium Partner sources. "
)
args_schema: Type[BaseModel] = LinkupInput
return_direct: bool = False

def _run(
self,
query: str,
run_manager: Optional[CallbackManagerForToolRun] = None,
) -> Any:
client = LinkupClient(api_key=self.linkup_api_key)
return client.search(
query=query,
depth=self.depth,
output_type=self.output_type,
structured_output_schema=self.structured_output_schema,
)

async def _arun(
self,
query: str,
run_manager: Optional[AsyncCallbackManagerForToolRun] = None,
) -> Any:
client = LinkupClient(api_key=self.linkup_api_key)
return await client.async_search(
query=query,
depth=self.depth,
output_type=self.output_type,
structured_output_schema=self.structured_output_schema,
)
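
The "structured" output type documented in the tool docstrings is not exercised by the examples in this PR. Below is a minimal sketch of how it could be used, assuming a hypothetical `Company` schema and a valid LINKUP_API_KEY in the environment; `structured_output_schema` accepts either a pydantic model class or a string containing a valid object JSON schema, as described above.

from pydantic import BaseModel, Field

from langchain_linkup import LinkupTool


class Company(BaseModel):
    # Hypothetical schema, for illustration only.
    name: str = Field(description="The company name.")
    founding_year: int = Field(description="The year the company was founded.")


tool = LinkupTool(
    depth="standard",
    output_type="structured",
    structured_output_schema=Company,  # A pydantic model class or a JSON schema string.
)
result = tool.invoke({"query": "What is Linkup, the new French AI startup?"})
print(result)  # Structured data matching the Company schema, as built by the Linkup client.
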
1 change: 1 addition & 0 deletions pyproject.toml
@@ -4,6 +4,7 @@ strict = true

[tool.pytest.ini_options]
asyncio_default_fixture_loop_scope = "function"
asyncio_mode = "auto" # Prevent some async tests in langchain-tests from being skipped

[tool.coverage.report]
exclude_also = ["raise ValueError", "raise TypeError"]
2 changes: 2 additions & 0 deletions requirements-dev.txt
@@ -1,4 +1,6 @@
langchain-openai
langchain-tests==0.3.4
langgraph
load-dotenv
mypy
pre-commit
2 changes: 1 addition & 1 deletion setup.py
@@ -6,7 +6,7 @@

setup(
name="langchain-linkup",
version="0.1.0",
version="0.1.1",
author="LINKUP TECHNOLOGIES",
author_email="[email protected]",
description="A Langchain integration for the Linkup API",
33 changes: 33 additions & 0 deletions tests/integration_tests/test_tool.py
@@ -0,0 +1,33 @@
import os
from typing import Any, Optional, Type

from dotenv import load_dotenv
from langchain_tests.integration_tests import ToolsIntegrationTests

from langchain_linkup import LinkupTool


class TestLinkupToolIntegration(ToolsIntegrationTests):
@property
def tool_constructor(self) -> Type[LinkupTool]:
return LinkupTool

@property
def tool_constructor_params(self) -> dict[str, Any]:
# Due to the way the tests are set up (with properties), we can't use the `linkup_api_key`
# fixture
load_dotenv() # Load environment variables from .env file if it exists
linkup_api_key: Optional[str] = os.environ.get("LINKUP_API_KEY")
if linkup_api_key is None:
raise ValueError("LINKUP_API_KEY environment variable is not set.")
return dict(
depth="standard",
output_type="searchResults",
linkup_api_key=linkup_api_key,
)

@property
def tool_invoke_params_example(self) -> dict[str, Any]:
return dict(
query="What's the weather like in Paris, London and Berlin?",
)
24 changes: 24 additions & 0 deletions tests/unit_tests/test_tool.py
@@ -0,0 +1,24 @@
from typing import Any, Type

from langchain_tests.unit_tests import ToolsUnitTests

from langchain_linkup import LinkupTool


class TestLinkupToolUnit(ToolsUnitTests):
@property
def tool_constructor(self) -> Type[LinkupTool]:
return LinkupTool

@property
def tool_constructor_params(self) -> dict[str, Any]:
return dict(
depth="standard",
output_type="searchResults",
)

@property
def tool_invoke_params_example(self) -> dict[str, Any]:
return dict(
query="What's the weather like in Paris, London and Berlin?",
)