isayahc commited on
Commit
821ebee
1 Parent(s): ce7f674

getting familiar with Ollama-based function calling

Browse files
Files changed (1) hide show
  1. ollama_fucntion_sample.py +76 -0
ollama_fucntion_sample.py ADDED
@@ -0,0 +1,76 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # LangChain supports many other chat models. Here, we're using Ollama
2
+
3
+
4
+ # https://python.langchain.com/docs/integrations/chat/ollama_functions
5
+ # https://python.langchain.com/docs/integrations/chat/ollama
6
+
7
+
8
+ from langchain_community.chat_models import ChatOllama
9
+ from langchain_core.output_parsers import StrOutputParser
10
+ from langchain_core.prompts import ChatPromptTemplate
11
+ from langchain.tools.retriever import create_retriever_tool
12
+ from langchain_community.utilities import SerpAPIWrapper
13
+ from langchain.retrievers import ArxivRetriever
14
+ from langchain_core.tools import Tool
15
+ from langchain import hub
16
+ from langchain.agents import AgentExecutor, load_tools
17
+ from langchain.agents.format_scratchpad import format_log_to_str
18
+ from langchain.agents.output_parsers import (
19
+ ReActJsonSingleInputOutputParser,
20
+ )
21
+ from langchain.tools.render import render_text_description
22
+ import os
23
+
24
+ import dotenv
25
+
26
+ dotenv.load_dotenv()
27
+
28
+
29
+ OLLMA_BASE_URL = os.getenv("OLLMA_BASE_URL")
30
+
31
+
32
+ # supports many more optional parameters. Hover on your `ChatOllama(...)`
33
+ # class to view the latest available supported parameters
34
+ llm = ChatOllama(
35
+ model="mistral:instruct",
36
+ base_url= OLLMA_BASE_URL
37
+ )
38
+
39
+ from langchain_experimental.llms.ollama_functions import OllamaFunctions
40
+
41
+ # model = OllamaFunctions(model="mistral")
42
+ model = OllamaFunctions(
43
+ model="mistral:instruct",
44
+ base_url= OLLMA_BASE_URL
45
+ )
46
+
47
+
48
+ model = model.bind(
49
+ functions=[
50
+ {
51
+ "name": "get_current_weather",
52
+ "description": "Get the current weather in a given location",
53
+ "parameters": {
54
+ "type": "object",
55
+ "properties": {
56
+ "location": {
57
+ "type": "string",
58
+ "description": "The city and state, " "e.g. San Francisco, CA",
59
+ },
60
+ "unit": {
61
+ "type": "string",
62
+ "enum": ["celsius", "fahrenheit"],
63
+ },
64
+ },
65
+ "required": ["location"],
66
+ },
67
+ }
68
+ ],
69
+ function_call={"name": "get_current_weather"},
70
+ )
71
+
72
+ from langchain.schema import HumanMessage
73
+
74
+ output = model.invoke("what is the weather in Boston?")
75
+
76
+ x=0