init
app.py CHANGED
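This change drops the module-level openai configuration (openai.base_url / openai.api_key with openai.ChatCompletion.create) and instead instantiates an OpenAI client pointed at the Hugging Face Inference API endpoint for mistralai/Mixtral-8x7B-Instruct-v0.1, switching generate_text over to the new client's chat.completions.create.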
@@ -3,7 +3,7 @@ import fitz
 import re
 import numpy as np
 import tensorflow_hub as hub
-import openai
+from openai import OpenAI
 import gradio as gr
 import os
 import shutil
@@ -12,8 +12,13 @@ from tempfile import NamedTemporaryFile
 from sklearn.neighbors import NearestNeighbors
 import huggingface_hub
 
-openai.base_url = "https://api-inference.huggingface.co/models/mistralai/Mixtral-8x7B-Instruct-v0.1/v1/"
-openai.api_key = huggingface_hub.get_token()
+# openai.base_url = "https://api-inference.huggingface.co/models/mistralai/Mixtral-8x7B-Instruct-v0.1/v1/"
+# openai.api_key = huggingface_hub.get_token()
+
+clinet = OpenAI(
+    base_url='https://api-inference.huggingface.co/models/mistralai/Mixtral-8x7B-Instruct-v0.1/v1/',
+    api_key=huggingface_hub.get_token(),
+)
 
 from util import pdf_to_text, text_to_chunks, SemanticSearch
 
@@ -29,24 +34,15 @@ def load_recommender(path, start_page=1):
 def generate_text(prompt, model = "gpt-3.5-turbo-16k-0613"):
 
     model="mistralai/Mixtral-8x7B-Instruct-v0.1"
-
-    temperature=0.7
     max_tokens=256
-
-    frequency_penalty=0
-    presence_penalty=0
-    message = openai.ChatCompletion.create(
+    message = clinet.chat.completions.create(
         model=model,
         messages=[
             {"role": "system", "content": "You are a helpful assistant."},
             {"role": "assistant", "content": "Here is some initial assistant message."},
             {"role": "user", "content": prompt}
         ],
-        temperature=.3,
         max_tokens=max_tokens,
-        top_p=top_p,
-        frequency_penalty=frequency_penalty,
-        presence_penalty=presence_penalty,
     ).choices[0].message['content']
     return message
 
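As committed, the new-client path still has two small issues: the client variable is spelled clinet, and with the openai>=1.0 client implied by "from openai import OpenAI", choices[0].message is an object rather than a dict, so message['content'] would be expected to fail with a TypeError. A minimal corrected sketch under those assumptions (openai>=1.0, a huggingface_hub version that exposes get_token(), and a model/max_tokens signature folded in for illustration):

from openai import OpenAI
import huggingface_hub

# OpenAI-compatible client pointed at the Hugging Face Inference API route
# used by this commit; the API key is the locally cached Hugging Face token.
client = OpenAI(
    base_url="https://api-inference.huggingface.co/models/mistralai/Mixtral-8x7B-Instruct-v0.1/v1/",
    api_key=huggingface_hub.get_token(),
)

def generate_text(prompt, model="mistralai/Mixtral-8x7B-Instruct-v0.1", max_tokens=256):
    # Same request the commit builds, without the shadowed `model` reassignment.
    response = client.chat.completions.create(
        model=model,
        messages=[
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "assistant", "content": "Here is some initial assistant message."},
            {"role": "user", "content": prompt},
        ],
        max_tokens=max_tokens,
    )
    # openai>=1.0 returns response objects, so read the text via attribute access.
    return response.choices[0].message.content

The sampling parameters the commit removes (temperature, top_p, frequency_penalty, presence_penalty) are left out here as well, matching the committed version.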