pminervini committed · Commit 28875eb
1 Parent(s): 847df2e

update
app.py CHANGED
@@ -3,7 +3,7 @@ import gradio as gr
 
 import torch
 from transformers import pipeline, StoppingCriteriaList, MaxTimeCriteria
-import
+from openai import OpenAI
 
 from elasticsearch import Elasticsearch
 
@@ -72,7 +72,8 @@ def rag_pipeline(prompt, index="pubmed", num_docs=3, model_name="HuggingFaceH4/z
     openai_model_name = model_name.split('/')[1]
     openai_prompt = '\n\n'.join([m['content'] for m in messages])
 
-
+    client = OpenAI()
+    openai_res = client.completions.create(model=openai_model_name,
                                            prompt=openai_prompt,
                                            max_tokens=generation_kwargs["max_new_tokens"],
                                            n=1,
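For context, here is a minimal sketch of the code path touched by this commit, reconstructed only from the hunks above. It assumes openai>=1.0 and an OPENAI_API_KEY set in the environment; the wrapper function name is hypothetical, `messages` and `generation_kwargs` are taken from the hunk context, and anything past the truncated `n=1,` argument (including how the result is consumed) is an assumption, not part of the diff.

# Sketch of the updated section of rag_pipeline after this commit.
# Assumptions: openai>=1.0, OPENAI_API_KEY is set, and the wrapper
# function name below is hypothetical (not in the original file).
from openai import OpenAI

def call_openai_completion(model_name, messages, generation_kwargs):
    # e.g. "HuggingFaceH4/zephyr-7b-beta" -> "zephyr-7b-beta"
    openai_model_name = model_name.split('/')[1]
    # Flatten the chat messages into a single plain-text prompt
    openai_prompt = '\n\n'.join([m['content'] for m in messages])

    # New in this commit: use the OpenAI v1 client instead of the old import
    client = OpenAI()
    openai_res = client.completions.create(model=openai_model_name,
                                           prompt=openai_prompt,
                                           max_tokens=generation_kwargs["max_new_tokens"],
                                           n=1)
    # The diff is cut off after n=1; returning the first completion's text
    # is an assumption about how the response is used downstream.
    return openai_res.choices[0].text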