{ "cells": [ { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Collecting arxiv\n", " Using cached arxiv-2.1.0-py3-none-any.whl.metadata (6.1 kB)\n", "Collecting feedparser==6.0.10 (from arxiv)\n", " Using cached feedparser-6.0.10-py3-none-any.whl.metadata (2.3 kB)\n", "Requirement already satisfied: requests==2.31.0 in e:\\langchain\\venv\\lib\\site-packages (from arxiv) (2.31.0)\n", "Collecting sgmllib3k (from feedparser==6.0.10->arxiv)\n", " Using cached sgmllib3k-1.0.0-py3-none-any.whl\n", "Requirement already satisfied: charset-normalizer<4,>=2 in e:\\langchain\\venv\\lib\\site-packages (from requests==2.31.0->arxiv) (3.3.2)\n", "Requirement already satisfied: idna<4,>=2.5 in e:\\langchain\\venv\\lib\\site-packages (from requests==2.31.0->arxiv) (3.6)\n", "Requirement already satisfied: urllib3<3,>=1.21.1 in e:\\langchain\\venv\\lib\\site-packages (from requests==2.31.0->arxiv) (2.2.1)\n", "Requirement already satisfied: certifi>=2017.4.17 in e:\\langchain\\venv\\lib\\site-packages (from requests==2.31.0->arxiv) (2024.2.2)\n", "Using cached arxiv-2.1.0-py3-none-any.whl (11 kB)\n", "Using cached feedparser-6.0.10-py3-none-any.whl (81 kB)\n", "Installing collected packages: sgmllib3k, feedparser, arxiv\n", "Successfully installed arxiv-2.1.0 feedparser-6.0.10 sgmllib3k-1.0.0\n" ] } ], "source": [ "!pip install arxiv" ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [], "source": [ "from langchain_community.tools import WikipediaQueryRun\n", "from langchain_community.utilities import WikipediaAPIWrapper" ] }, { "cell_type": "code", "execution_count": 16, "metadata": {}, "outputs": [], "source": [ "api_wrapper=WikipediaAPIWrapper(top_k_results=1,doc_content_chars_max=200)\n", "wiki=WikipediaQueryRun(api_wrapper=api_wrapper)" ] }, { "cell_type": "code", "execution_count": 17, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "'wikipedia'" ] }, "execution_count": 17, "metadata": {}, "output_type": "execute_result" } ], "source": [ "wiki.name" ] }, { "cell_type": "code", "execution_count": 12, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "VectorStoreRetriever(tags=['FAISS', 'OpenAIEmbeddings'], vectorstore=)" ] }, "execution_count": 12, "metadata": {}, "output_type": "execute_result" } ], "source": [ "from langchain_community.document_loaders import WebBaseLoader\n", "from langchain_community.vectorstores import FAISS\n", "from langchain_openai import OpenAIEmbeddings\n", "from langchain_text_splitters import RecursiveCharacterTextSplitter\n", "\n", "loader=WebBaseLoader(\"https://docs.smith.langchain.com/\")\n", "docs=loader.load()\n", "documents=RecursiveCharacterTextSplitter(chunk_size=1000,chunk_overlap=200).split_documents(docs)\n", "vectordb=FAISS.from_documents(documents,OpenAIEmbeddings())\n", "retriever=vectordb.as_retriever()\n", "retriever\n" ] }, { "cell_type": "code", "execution_count": 13, "metadata": {}, "outputs": [], "source": [ "from langchain.tools.retriever import create_retriever_tool\n", "retriever_tool=create_retriever_tool(retriever,\"langsmith_search\",\n", " \"Search for information about LangSmith. 
For any questions about LangSmith, you must use this tool!\")" ] }, { "cell_type": "code", "execution_count": 14, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "'langsmith_search'" ] }, "execution_count": 14, "metadata": {}, "output_type": "execute_result" } ], "source": [ "retriever_tool.name" ] }, { "cell_type": "code", "execution_count": 15, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "'arxiv'" ] }, "execution_count": 15, "metadata": {}, "output_type": "execute_result" } ], "source": [ "## Arxiv Tool\n", "from langchain_community.utilities import ArxivAPIWrapper\n", "from langchain_community.tools import ArxivQueryRun\n", "\n", "arxiv_wrapper=ArxivAPIWrapper(top_k_results=1, doc_content_chars_max=200)\n", "arxiv=ArxivQueryRun(api_wrapper=arxiv_wrapper)\n", "arxiv.name" ] }, { "cell_type": "code", "execution_count": 18, "metadata": {}, "outputs": [], "source": [ "tools=[wiki,arxiv,retriever_tool]" ] }, { "cell_type": "code", "execution_count": 19, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "[WikipediaQueryRun(api_wrapper=WikipediaAPIWrapper(wiki_client=, top_k_results=1, lang='en', load_all_available_meta=False, doc_content_chars_max=200)),\n", " ArxivQueryRun(api_wrapper=ArxivAPIWrapper(arxiv_search=, arxiv_exceptions=(, , ), top_k_results=1, ARXIV_MAX_QUERY_LENGTH=300, load_max_docs=100, load_all_available_meta=False, doc_content_chars_max=200, arxiv_result=)),\n", " Tool(name='langsmith_search', description='Search for information about LangSmith. For any questions about LangSmith, you must use this tool!', args_schema=, func=functools.partial(, retriever=VectorStoreRetriever(tags=['FAISS', 'OpenAIEmbeddings'], vectorstore=), document_prompt=PromptTemplate(input_variables=['page_content'], template='{page_content}'), document_separator='\\n\\n'), coroutine=functools.partial(, retriever=VectorStoreRetriever(tags=['FAISS', 'OpenAIEmbeddings'], vectorstore=), document_prompt=PromptTemplate(input_variables=['page_content'], template='{page_content}'), document_separator='\\n\\n'))]" ] }, "execution_count": 19, "metadata": {}, "output_type": "execute_result" } ], "source": [ "tools" ] }, { "cell_type": "code", "execution_count": 20, "metadata": {}, "outputs": [], "source": [ "from dotenv import load_dotenv\n", "\n", "load_dotenv()\n", "import os\n", "os.environ['OPENAI_API_KEY']=os.getenv(\"OPENAI_API_KEY\")\n", "from langchain_openai import ChatOpenAI\n", "\n", "llm = ChatOpenAI(model=\"gpt-3.5-turbo-0125\", temperature=0)" ] }, { "cell_type": "code", "execution_count": 22, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "[SystemMessagePromptTemplate(prompt=PromptTemplate(input_variables=[], template='You are a helpful assistant')),\n", " MessagesPlaceholder(variable_name='chat_history', optional=True),\n", " HumanMessagePromptTemplate(prompt=PromptTemplate(input_variables=['input'], template='{input}')),\n", " MessagesPlaceholder(variable_name='agent_scratchpad')]" ] }, "execution_count": 22, "metadata": {}, "output_type": "execute_result" } ], "source": [ "from langchain import hub\n", "# Get the prompt to use - you can modify this!\n", "prompt = hub.pull(\"hwchase17/openai-functions-agent\")\n", "prompt.messages" ] }, { "cell_type": "code", "execution_count": 23, "metadata": {}, "outputs": [], "source": [ "### Agents\n", "from langchain.agents import create_openai_tools_agent\n", "agent=create_openai_tools_agent(llm,tools,prompt)" ] }, { "cell_type": "code", "execution_count": 26, "metadata": {}, "outputs": [ { "data": { "text/plain": [ 
"AgentExecutor(verbose=True, agent=RunnableMultiActionAgent(runnable=RunnableAssign(mapper={\n", " agent_scratchpad: RunnableLambda(lambda x: format_to_openai_tool_messages(x['intermediate_steps']))\n", "})\n", "| ChatPromptTemplate(input_variables=['agent_scratchpad', 'input'], input_types={'chat_history': typing.List[typing.Union[langchain_core.messages.ai.AIMessage, langchain_core.messages.human.HumanMessage, langchain_core.messages.chat.ChatMessage, langchain_core.messages.system.SystemMessage, langchain_core.messages.function.FunctionMessage, langchain_core.messages.tool.ToolMessage]], 'agent_scratchpad': typing.List[typing.Union[langchain_core.messages.ai.AIMessage, langchain_core.messages.human.HumanMessage, langchain_core.messages.chat.ChatMessage, langchain_core.messages.system.SystemMessage, langchain_core.messages.function.FunctionMessage, langchain_core.messages.tool.ToolMessage]]}, metadata={'lc_hub_owner': 'hwchase17', 'lc_hub_repo': 'openai-functions-agent', 'lc_hub_commit_hash': 'a1655024b06afbd95d17449f21316291e0726f13dcfaf990cc0d18087ad689a5'}, messages=[SystemMessagePromptTemplate(prompt=PromptTemplate(input_variables=[], template='You are a helpful assistant')), MessagesPlaceholder(variable_name='chat_history', optional=True), HumanMessagePromptTemplate(prompt=PromptTemplate(input_variables=['input'], template='{input}')), MessagesPlaceholder(variable_name='agent_scratchpad')])\n", "| RunnableBinding(bound=ChatOpenAI(client=, async_client=, model_name='gpt-3.5-turbo-0125', temperature=0.0, openai_api_key=SecretStr('**********'), openai_proxy=''), kwargs={'tools': [{'type': 'function', 'function': {'name': 'wikipedia', 'description': 'A wrapper around Wikipedia. Useful for when you need to answer general questions about people, places, companies, facts, historical events, or other subjects. Input should be a search query.', 'parameters': {'properties': {'__arg1': {'title': '__arg1', 'type': 'string'}}, 'required': ['__arg1'], 'type': 'object'}}}, {'type': 'function', 'function': {'name': 'arxiv', 'description': 'A wrapper around Arxiv.org Useful for when you need to answer questions about Physics, Mathematics, Computer Science, Quantitative Biology, Quantitative Finance, Statistics, Electrical Engineering, and Economics from scientific articles on arxiv.org. Input should be a search query.', 'parameters': {'type': 'object', 'properties': {'query': {'description': 'search query to look up', 'type': 'string'}}, 'required': ['query']}}}, {'type': 'function', 'function': {'name': 'langsmith_search', 'description': 'Search for information about LangSmith. For any questions about LangSmith, you must use this tool!', 'parameters': {'type': 'object', 'properties': {'query': {'description': 'query to look up in retriever', 'type': 'string'}}, 'required': ['query']}}}]})\n", "| OpenAIToolsAgentOutputParser(), input_keys_arg=[], return_keys_arg=[], stream_runnable=True), tools=[WikipediaQueryRun(api_wrapper=WikipediaAPIWrapper(wiki_client=, top_k_results=1, lang='en', load_all_available_meta=False, doc_content_chars_max=200)), ArxivQueryRun(api_wrapper=ArxivAPIWrapper(arxiv_search=, arxiv_exceptions=(, , ), top_k_results=1, ARXIV_MAX_QUERY_LENGTH=300, load_max_docs=100, load_all_available_meta=False, doc_content_chars_max=200, arxiv_result=)), Tool(name='langsmith_search', description='Search for information about LangSmith. 
For any questions about LangSmith, you must use this tool!', args_schema=, func=functools.partial(, retriever=VectorStoreRetriever(tags=['FAISS', 'OpenAIEmbeddings'], vectorstore=), document_prompt=PromptTemplate(input_variables=['page_content'], template='{page_content}'), document_separator='\\n\\n'), coroutine=functools.partial(, retriever=VectorStoreRetriever(tags=['FAISS', 'OpenAIEmbeddings'], vectorstore=), document_prompt=PromptTemplate(input_variables=['page_content'], template='{page_content}'), document_separator='\\n\\n'))])" ] }, "execution_count": 26, "metadata": {}, "output_type": "execute_result" } ], "source": [ "## Agent Executor\n", "# AgentExecutor runs the agent loop: it executes each tool call the LLM chooses and feeds the result back until a final answer is returned (verbose=True prints the intermediate steps)\n", "from langchain.agents import AgentExecutor\n", "agent_executor=AgentExecutor(agent=agent,tools=tools,verbose=True)\n", "agent_executor" ] }, { "cell_type": "code", "execution_count": 27, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "\n", "\n", "\u001b[1m> Entering new AgentExecutor chain...\u001b[0m\n", "\u001b[32;1m\u001b[1;3m\n", "Invoking: `langsmith_search` with `{'query': 'Langsmith'}`\n", "\n", "\n", "\u001b[0m\u001b[38;5;200m\u001b[1;3mGetting started with LangSmith | 🦜️🛠️ LangSmith\n", "\n", "Skip to main contentLangSmith API DocsSearchGo to AppQuick StartUser GuideTracingEvaluationProduction Monitoring & AutomationsPrompt HubProxyPricingSelf-HostingCookbookQuick StartOn this pageGetting started with LangSmithIntroduction​LangSmith is a platform for building production-grade LLM applications. It allows you to closely monitor and evaluate your application, so you can ship quickly and with confidence. Use of LangChain is not necessary - LangSmith works on its own!Install LangSmith​We offer Python and Typescript SDKs for all your LangSmith needs.PythonTypeScriptpip install -U langsmithyarn add langchain langsmithCreate an API key​To create an API key head to the setting pages. Then click Create API Key.Setup your environment​Shellexport LANGCHAIN_TRACING_V2=trueexport LANGCHAIN_API_KEY=# The below examples use the OpenAI API, though it's not necessary in generalexport OPENAI_API_KEY=Log your first trace​We provide multiple ways to log traces\n", "\n", "Learn about the workflows LangSmith supports at each stage of the LLM application lifecycle.Pricing: Learn about the pricing model for LangSmith.Self-Hosting: Learn about self-hosting options for LangSmith.Proxy: Learn about the proxy capabilities of LangSmith.Tracing: Learn about the tracing capabilities of LangSmith.Evaluation: Learn about the evaluation capabilities of LangSmith.Prompt Hub Learn about the Prompt Hub, a prompt management tool built into LangSmith.Additional Resources​LangSmith Cookbook: A collection of tutorials and end-to-end walkthroughs using LangSmith.LangChain Python: Docs for the Python LangChain library.LangChain Python API Reference: documentation to review the core APIs of LangChain.LangChain JS: Docs for the TypeScript LangChain libraryDiscord: Join us on our Discord to discuss all things LangChain!FAQ​How do I migrate projects between organizations?​Currently we do not support project migration betwen organizations. While you can manually imitate this by\n", "\n", "team deals with sensitive data that cannot be logged. How can I ensure that only my team can access it?​If you are interested in a private deployment of LangSmith or if you need to self-host, please reach out to us at sales@langchain.dev. 
Self-hosting LangSmith requires an annual enterprise license that also comes with support and formalized access to the LangChain team.Was this page helpful?NextUser GuideIntroductionInstall LangSmithCreate an API keySetup your environmentLog your first traceCreate your first evaluationNext StepsAdditional ResourcesFAQHow do I migrate projects between organizations?Why aren't my runs aren't showing up in my project?My team deals with sensitive data that cannot be logged. How can I ensure that only my team can access it?CommunityDiscordTwitterGitHubDocs CodeLangSmith SDKPythonJS/TSMoreHomepageBlogLangChain Python DocsLangChain JS/TS DocsCopyright © 2024 LangChain, Inc.\u001b[0m\u001b[32;1m\u001b[1;3mLangSmith is a platform for building production-grade LLM (Large Language Model) applications. It allows you to closely monitor and evaluate your application, ensuring you can ship quickly and with confidence. LangSmith offers Python and Typescript SDKs for all your development needs. You can create an API key, set up your environment, and log your first trace using LangSmith.\n", "\n", "LangSmith supports various workflows at each stage of the LLM application lifecycle, and you can learn more about its pricing model, self-hosting options, proxy capabilities, tracing capabilities, evaluation capabilities, and the Prompt Hub tool. Additionally, LangSmith provides a Cookbook with tutorials and walkthroughs, along with documentation for the Python and TypeScript LangChain libraries.\n", "\n", "If you have sensitive data that cannot be logged or if you need a private deployment of LangSmith, you can reach out to the LangChain team at sales@langchain.dev for self-hosting options that require an annual enterprise license.\n", "\n", "For more information, you can visit the LangSmith website or join their Discord community to discuss all things LangChain.\u001b[0m\n", "\n", "\u001b[1m> Finished chain.\u001b[0m\n" ] }, { "data": { "text/plain": [ "{'input': 'Tell me about Langsmith',\n", " 'output': 'LangSmith is a platform for building production-grade LLM (Large Language Model) applications. It allows you to closely monitor and evaluate your application, ensuring you can ship quickly and with confidence. LangSmith offers Python and Typescript SDKs for all your development needs. You can create an API key, set up your environment, and log your first trace using LangSmith.\\n\\nLangSmith supports various workflows at each stage of the LLM application lifecycle, and you can learn more about its pricing model, self-hosting options, proxy capabilities, tracing capabilities, evaluation capabilities, and the Prompt Hub tool. 
Additionally, LangSmith provides a Cookbook with tutorials and walkthroughs, along with documentation for the Python and TypeScript LangChain libraries.\\n\\nIf you have sensitive data that cannot be logged or if you need a private deployment of LangSmith, you can reach out to the LangChain team at sales@langchain.dev for self-hosting options that require an annual enterprise license.\\n\\nFor more information, you can visit the LangSmith website or join their Discord community to discuss all things LangChain.'}" ] }, "execution_count": 27, "metadata": {}, "output_type": "execute_result" } ], "source": [ "agent_executor.invoke({\"input\":\"Tell me about Langsmith\"})" ] }, { "cell_type": "code", "execution_count": 29, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "\n", "\n", "\u001b[1m> Entering new AgentExecutor chain...\u001b[0m\n", "\u001b[32;1m\u001b[1;3m\n", "Invoking: `arxiv` with `{'query': '1605.08386'}`\n", "\n", "\n", "\u001b[0m\u001b[33;1m\u001b[1;3mPublished: 2016-05-26\n", "Title: Heat-bath random walks with Markov bases\n", "Authors: Caprice Stanley, Tobias Windisch\n", "Summary: Graphs on lattice points are studied whose edges come from a finite set of\n", "allo\u001b[0m\u001b[32;1m\u001b[1;3mThe paper with the identifier 1605.08386 is titled \"Heat-bath random walks with Markov bases\" by Caprice Stanley and Tobias Windisch. It discusses the study of graphs on lattice points where the edges are derived from a finite set of allocations.\u001b[0m\n", "\n", "\u001b[1m> Finished chain.\u001b[0m\n" ] }, { "data": { "text/plain": [ "{'input': \"What's the paper 1605.08386 about?\",\n", " 'output': 'The paper with the identifier 1605.08386 is titled \"Heat-bath random walks with Markov bases\" by Caprice Stanley and Tobias Windisch. It discusses the study of graphs on lattice points where the edges are derived from a finite set of allocations.'}" ] }, "execution_count": 29, "metadata": {}, "output_type": "execute_result" } ], "source": [ "agent_executor.invoke({\"input\":\"What's the paper 1605.08386 about?\"})" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.10.0" } }, "nbformat": 4, "nbformat_minor": 2 }