Spaces:
Runtime error
Runtime error
Yash Sachdeva
committed on
Commit
·
70864b6
1
Parent(s):
1c519c8
flan
Browse files — question_paper.py +2 -3
question_paper.py
CHANGED
@@ -4,7 +4,7 @@ from fastapi.middleware.cors import CORSMiddleware
|
|
4 |
from fastapi.encoders import jsonable_encoder
|
5 |
from fastapi.responses import JSONResponse
|
6 |
|
7 |
-
pipe_flan = pipeline("text2text-generation", model="google/flan-t5-small", max_new_tokens=
|
8 |
|
9 |
app = FastAPI()
|
10 |
|
@@ -22,7 +22,7 @@ app.add_middleware(
|
|
22 |
)
|
23 |
|
24 |
@app.get("/")
|
25 |
-
def llama():
|
26 |
# prompt = [{'role': 'user', 'content': ""+input}]
|
27 |
# inputs = TOKENIZER.apply_chat_template( prompt, add_generation_prompt=True, return_tensors='pt' )
|
28 |
|
@@ -30,7 +30,6 @@ def llama():
|
|
30 |
|
31 |
# tresponse = TOKENIZER.decode(tokens[0], skip_special_tokens=False)
|
32 |
# print(tresponse)
|
33 |
-
input = "Generate 5 jokes"
|
34 |
output = pipe_flan(input)
|
35 |
text = ""
|
36 |
for o in output:
|
|
|
4 |
from fastapi.encoders import jsonable_encoder
|
5 |
from fastapi.responses import JSONResponse
|
6 |
|
7 |
+
pipe_flan = pipeline("text2text-generation", model="google/flan-t5-small", max_new_tokens=100)
|
8 |
|
9 |
app = FastAPI()
|
10 |
|
|
|
22 |
)
|
23 |
|
24 |
@app.get("/")
|
25 |
+
def llama(input):
|
26 |
# prompt = [{'role': 'user', 'content': ""+input}]
|
27 |
# inputs = TOKENIZER.apply_chat_template( prompt, add_generation_prompt=True, return_tensors='pt' )
|
28 |
|
|
|
30 |
|
31 |
# tresponse = TOKENIZER.decode(tokens[0], skip_special_tokens=False)
|
32 |
# print(tresponse)
|
|
|
33 |
output = pipe_flan(input)
|
34 |
text = ""
|
35 |
for o in output:
|