{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [], "source": [ "from langchain_openai import ChatOpenAI\n", "llm = ChatOpenAI()" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [], "source": [ "from langchain_community.document_loaders import WebBaseLoader\n", "loader = WebBaseLoader(\"https://docs.smith.langchain.com\")\n", "\n", "docs = loader.load()" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [], "source": [ "from langchain_openai import OpenAIEmbeddings\n", "\n", "embeddings = OpenAIEmbeddings()" ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [], "source": [ "from langchain_community.vectorstores import FAISS\n", "from langchain.text_splitter import RecursiveCharacterTextSplitter\n", "\n", "\n", "text_splitter = RecursiveCharacterTextSplitter()\n", "documents = text_splitter.split_documents(docs)\n", "vector = FAISS.from_documents(documents, embeddings)" ] }, { "cell_type": "code", "execution_count": 5, "metadata": {}, "outputs": [], "source": [ "from langchain.chains import create_history_aware_retriever\n", "from langchain_core.prompts import MessagesPlaceholder\n", "from langchain_core.prompts import ChatPromptTemplate\n", "\n", "# First we need a prompt that we can pass into an LLM to generate this search query\n", "\n", "prompt = ChatPromptTemplate.from_messages([\n", " MessagesPlaceholder(variable_name=\"chat_history\"),\n", " (\"user\", \"{input}\"),\n", " (\"user\", \"Given the above conversation, generate a search query to look up in order to get information relevant to the conversation\")\n", "])\n", "retriever_chain = create_history_aware_retriever(llm, vector.as_retriever(), prompt)" ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "[Document(page_content=\"LangSmith | 🦜ï¸�ğŸ›\\xa0ï¸� LangSmith\\n\\n\\n\\n\\n\\nSkip to main content🦜ï¸�ğŸ›\\xa0ï¸� LangSmith DocsLangChain Python DocsLangChain JS/TS DocsLangSmith API DocsSearchGo to AppLangSmithUser GuideSetupPricing (Coming Soon)Self-HostingTracingEvaluationMonitoringPrompt HubLangSmithOn this pageLangSmithIntroduction​LangSmith is a platform for building production-grade LLM applications.It lets you debug, test, evaluate, and monitor chains and intelligent agents built on any LLM framework and seamlessly integrates with LangChain, the go-to open source framework for building with LLMs.LangSmith is developed by LangChain, the company behind the open source LangChain framework.Quick Start​Tracing: Get started with the tracing quick start.Evaluation: Get started with the evaluation quick start.Next Steps​Check out the following sections to learn more about LangSmith:User Guide: Learn about the workflows LangSmith supports at each stage of the LLM application lifecycle.Setup: Learn how to create an account, obtain an API key, and configure your environment.Pricing: Learn about the pricing model for LangSmith.Self-Hosting: Learn about self-hosting options for LangSmith.Tracing: Learn about the tracing capabilities of LangSmith.Evaluation: Learn about the evaluation capabilities of LangSmith.Prompt Hub Learn about the Prompt Hub, a prompt management tool built into LangSmith.Additional Resources​LangSmith Cookbook: A collection of tutorials and end-to-end walkthroughs using LangSmith.LangChain Python: Docs for the Python LangChain library.LangChain Python API Reference: documentation to review the core APIs of LangChain.LangChain JS: Docs for 
the TypeScript LangChain libraryDiscord: Join us on our Discord to discuss all things LangChain!Contact SalesIf you're interested in enterprise security and admin features, special deployment options, or access for large teams, reach out to speak with sales.NextUser GuideIntroductionQuick StartNext StepsAdditional ResourcesCommunityDiscordTwitterGitHubDocs CodeLangSmith SDKPythonJS/TSMoreHomepageBlogCopyright © 2024 LangChain, Inc.\", metadata={'source': 'https://docs.smith.langchain.com', 'title': 'LangSmith | 🦜️🛠️ LangSmith', 'description': 'Introduction', 'language': 'en'})]" ] }, "execution_count": 6, "metadata": {}, "output_type": "execute_result" } ], "source": [ "from langchain_core.messages import HumanMessage, AIMessage\n", "\n", "# Simulate a prior exchange, then ask a follow-up that only makes sense with that history\n", "chat_history = [HumanMessage(content=\"Can LangSmith help test my LLM applications?\"), AIMessage(content=\"Yes!\")]\n", "retriever_chain.invoke({\n", " \"chat_history\": chat_history,\n", " \"input\": \"Tell me how\"\n", "})" ] }
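, { "cell_type": "markdown", "metadata": {}, "source": [ "The chain above only retrieves documents relevant to the follow-up question; it does not answer it. Below is a minimal sketch of a likely next step, assuming `create_stuff_documents_chain` and `create_retrieval_chain` from `langchain` and an answer prompt that receives the retrieved documents as `{context}` (the names here are illustrative, not part of the original notebook): compose the history-aware retriever with a document chain that generates the reply." ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "from langchain.chains import create_retrieval_chain\n", "from langchain.chains.combine_documents import create_stuff_documents_chain\n", "\n", "# Prompt that answers from the retrieved documents ({context}) plus the chat history\n", "answer_prompt = ChatPromptTemplate.from_messages([\n", "    (\"system\", \"Answer the user's questions based on the below context:\\n\\n{context}\"),\n", "    MessagesPlaceholder(variable_name=\"chat_history\"),\n", "    (\"user\", \"{input}\"),\n", "])\n", "\n", "# Stuff the retrieved documents into the prompt and generate an answer\n", "document_chain = create_stuff_documents_chain(llm, answer_prompt)\n", "retrieval_chain = create_retrieval_chain(retriever_chain, document_chain)\n", "\n", "# The result is a dict; the generated reply is under the \"answer\" key\n", "retrieval_chain.invoke({\n", "    \"chat_history\": chat_history,\n", "    \"input\": \"Tell me how\"\n", "})" ] } ], "metadata": { "kernelspec": { "display_name": "base", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.11.4" } }, "nbformat": 4, "nbformat_minor": 2 }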