Red-tech-hub committed
Commit ce5e9df (parent 707bc74)

[update] run.py

Files changed (1)
  run.py +12 -13
run.py CHANGED
@@ -6,15 +6,18 @@ from langchain_community.vectorstores import Chroma
 from transformers import AutoModelForCausalLM
 
 import os
-
-# Ensure the TRANSFORMERS_CACHE directory exists and is writable
-
-cache_dir = os.environ['TRANSFORMERS_CACHE']
+os.environ['TRANSFORMERS_CACHE'] = '/code/model/cache/'
 
 model_kwargs = {'trust_remote_code': True}
 
+# embedding = HuggingFaceEmbeddings(
+#     model_name="nomic-ai/nomic-embed-text-v1.5",
+#     model_kwargs=model_kwargs
+# )
+
 db = Chroma(
     persist_directory="./chroma_db",
+    # embedding_function=embedding,
     collection_name='CVE'
 )
 
@@ -28,15 +31,11 @@ Question: {question}
 
 prompt = ChatPromptTemplate.from_template(template)
 
-try:
-    model = AutoModelForCausalLM.from_pretrained(
-        "zephyr-7b-beta.Q4_K_S.gguf",
-        model_type='mistral',
-        threads=3,
-    )
-except Exception as e:
-    print(f"Failed to load model: {e}")
-    exit(1)
+model = AutoModelForCausalLM.from_pretrained(
+    "zephyr-7b-beta.Q4_K_S.gguf",
+    model_type='mistral',
+    threads=3,
+)
 
 chain = (
     {"context": retriever, "question": RunnablePassthrough()}