isayahc committed on
Commit 137bd7a · 1 Parent(s): 519bb78

removed tools that did not work too well

Files changed (2)
  1. ollama_fucntion_sample.py +0 -76
  2. ollama_tools.py +0 -44
ollama_fucntion_sample.py DELETED
@@ -1,76 +0,0 @@
-# LangChain supports many other chat models. Here, we're using Ollama
-
-
-# https://python.langchain.com/docs/integrations/chat/ollama_functions
-# https://python.langchain.com/docs/integrations/chat/ollama
-
-
-from langchain_community.chat_models import ChatOllama
-from langchain_core.output_parsers import StrOutputParser
-from langchain_core.prompts import ChatPromptTemplate
-from langchain.tools.retriever import create_retriever_tool
-from langchain_community.utilities import SerpAPIWrapper
-from langchain.retrievers import ArxivRetriever
-from langchain_core.tools import Tool
-from langchain import hub
-from langchain.agents import AgentExecutor, load_tools
-from langchain.agents.format_scratchpad import format_log_to_str
-from langchain.agents.output_parsers import (
-    ReActJsonSingleInputOutputParser,
-)
-from langchain.tools.render import render_text_description
-import os
-
-import dotenv
-
-dotenv.load_dotenv()
-
-
-OLLMA_BASE_URL = os.getenv("OLLMA_BASE_URL")
-
-
-# supports many more optional parameters. Hover on your `ChatOllama(...)`
-# class to view the latest available supported parameters
-llm = ChatOllama(
-    model="mistral:instruct",
-    base_url=OLLMA_BASE_URL
-)
-
-from langchain_experimental.llms.ollama_functions import OllamaFunctions
-
-# model = OllamaFunctions(model="mistral")
-model = OllamaFunctions(
-    model="mistral:instruct",
-    base_url=OLLMA_BASE_URL
-)
-
-
-model = model.bind(
-    functions=[
-        {
-            "name": "get_current_weather",
-            "description": "Get the current weather in a given location",
-            "parameters": {
-                "type": "object",
-                "properties": {
-                    "location": {
-                        "type": "string",
-                        "description": "The city and state, " "e.g. San Francisco, CA",
-                    },
-                    "unit": {
-                        "type": "string",
-                        "enum": ["celsius", "fahrenheit"],
-                    },
-                },
-                "required": ["location"],
-            },
-        }
-    ],
-    function_call={"name": "get_current_weather"},
-)
-
-from langchain.schema import HumanMessage
-
-output = model.invoke("what is the weather in Boston?")
-
-x=0
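For reference, the deleted sample forced a single OpenAI-style function call through the experimental OllamaFunctions wrapper; most of its imports (agents, retrievers, SerpAPIWrapper) were never used, and it read a misspelled OLLMA_BASE_URL environment variable. Below is a minimal, self-contained sketch of the same pattern, not the repo's exact code: the corrected OLLAMA_BASE_URL name and the localhost fallback are assumptions, and it presumes an Ollama server with mistral:instruct pulled.

# Sketch: bind one function schema to OllamaFunctions and force the call.
# Assumes an Ollama server is reachable (default http://localhost:11434)
# and that `mistral:instruct` has been pulled.
import os

from langchain_experimental.llms.ollama_functions import OllamaFunctions

model = OllamaFunctions(
    model="mistral:instruct",
    base_url=os.getenv("OLLAMA_BASE_URL", "http://localhost:11434"),
)

# function_call forces the model to emit arguments for this function
# rather than free-form text.
model = model.bind(
    functions=[
        {
            "name": "get_current_weather",
            "description": "Get the current weather in a given location",
            "parameters": {
                "type": "object",
                "properties": {
                    "location": {
                        "type": "string",
                        "description": "The city and state, e.g. San Francisco, CA",
                    },
                    "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
                },
                "required": ["location"],
            },
        }
    ],
    function_call={"name": "get_current_weather"},
)

# Returns an AIMessage; the call and its JSON arguments appear in
# output.additional_kwargs["function_call"].
output = model.invoke("what is the weather in Boston?")
print(output)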
 
ollama_tools.py DELETED
@@ -1,44 +0,0 @@
-from langchain.chains import create_extraction_chain
-
-# Schema
-schema = {
-    "properties": {
-        "name": {"type": "string"},
-        "height": {"type": "integer"},
-        "hair_color": {"type": "string"},
-    },
-    "required": ["name", "height"],
-}
-
-# Input
-input = """Alex is 5 feet tall. Claudia is 1 feet taller than Alex and jumps higher than him. Claudia is a brunette and Alex is blonde."""
-
-
-
-from langchain_experimental.llms.ollama_functions import OllamaFunctions
-
-
-import os
-
-import dotenv
-
-dotenv.load_dotenv()
-
-
-OLLMA_BASE_URL = os.getenv("OLLMA_BASE_URL")
-
-
-# supports many more optional parameters. Hover on your `ChatOllama(...)`
-# class to view the latest available supported parameters
-model = llm = OllamaFunctions(
-    model="mistral:instruct",
-    base_url=OLLMA_BASE_URL
-)
-
-# model = OllamaFunctions(model="mistral")
-
-# Run chain
-# llm = OllamaFunctions(model="mistral:instruct", temperature=0)
-chain = create_extraction_chain(schema, llm)
-output = chain.run(input)
-x = 0
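This second deleted script wired the same wrapper into create_extraction_chain, which turns a JSON schema into a forced function call and parses the model's output into dicts matching the schema. A minimal sketch under the same assumptions (local Ollama server; corrected env-var name; the variable names are illustrative, and the original's shadowing of the input builtin is avoided):

# Sketch: schema-driven extraction with create_extraction_chain.
# Assumes an Ollama server is reachable (default http://localhost:11434).
import os

from langchain.chains import create_extraction_chain
from langchain_experimental.llms.ollama_functions import OllamaFunctions

schema = {
    "properties": {
        "name": {"type": "string"},
        "height": {"type": "integer"},
        "hair_color": {"type": "string"},
    },
    "required": ["name", "height"],
}

llm = OllamaFunctions(
    model="mistral:instruct",
    base_url=os.getenv("OLLAMA_BASE_URL", "http://localhost:11434"),
    temperature=0,  # deterministic output helps structured extraction
)

chain = create_extraction_chain(schema, llm)

text = (
    "Alex is 5 feet tall. Claudia is 1 feet taller than Alex and jumps "
    "higher than him. Claudia is a brunette and Alex is blonde."
)

# Expected shape: a list of dicts conforming to the schema, e.g.
# [{"name": "Alex", "height": 5, "hair_color": "blonde"}, ...]
result = chain.run(text)
print(result)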