isayahc committed
Commit ef152cd
1 Parent(s): 821ebee

example retrieved from documentation

Files changed (1)
  1. ollama_tools.py +44 -0
ollama_tools.py ADDED
@@ -0,0 +1,44 @@
+ import os
+
+ import dotenv
+ from langchain.chains import create_extraction_chain
+ from langchain_experimental.llms.ollama_functions import OllamaFunctions
+
+ # Load environment variables from a local .env file
+ dotenv.load_dotenv()
+
+ # Schema: fields to extract from the input text
+ schema = {
+     "properties": {
+         "name": {"type": "string"},
+         "height": {"type": "integer"},
+         "hair_color": {"type": "string"},
+     },
+     "required": ["name", "height"],
+ }
+
+ # Input
+ input_text = """Alex is 5 feet tall. Claudia is 1 feet taller than Alex and jumps higher than him. Claudia is a brunette and Alex is blonde."""
+
+ # Base URL of the Ollama server, read from the .env file
+ OLLMA_BASE_URL = os.getenv("OLLMA_BASE_URL")
+
+ # OllamaFunctions supports many more optional parameters; see the class
+ # documentation for the latest supported options.
+ # Alternative: OllamaFunctions(model="mistral")
+ llm = OllamaFunctions(
+     model="mistral:instruct",
+     base_url=OLLMA_BASE_URL,
+ )
+
+ # Run the extraction chain
+ chain = create_extraction_chain(schema, llm)
+ output = chain.run(input_text)
+ print(output)
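
For context, the script expects an OLLMA_BASE_URL entry in a local .env file; the value below is a placeholder (Ollama's default local port is 11434), and the records shown are only a sketch of the shape create_extraction_chain produces, not actual output from this commit.

# .env (hypothetical value)
OLLMA_BASE_URL=http://localhost:11434

# `output` is a list of dicts keyed by the schema properties, roughly:
# [{"name": "Alex", "height": 5, "hair_color": "blonde"},
#  {"name": "Claudia", "height": 6, "hair_color": "brunette"}]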