Update main.py
main.py CHANGED

@@ -15,7 +15,7 @@ nltk.download('punkt')
 tokenizer = nltk.data.load('tokenizers/punkt/english.pickle')
 
 HF_TOKEN = getenv("HF_TOKEN")
-MODEL = "
+MODEL = "google/gemma-1.1-7b-it"
 FALLBACK_MODELS = [
     "mistralai/Mixtral-8x7B-Instruct-v0.1",
     "mistralai/Mistral-7B-Instruct-v0.2", "mistralai/Mistral-7B-Instruct-v0.1"
@@ -63,6 +63,7 @@ async def generate_response(data: InputData) -> Any:
 
     if isinstance(repaired_response, str):
         raise HTTPException(status_code=500, detail="Invalid response from model")
+
     else:
         cleaned_response = {}
         for key, value in repaired_response.items():
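For context, a minimal sketch of how a primary model plus a fallback list like the one above might be tried in order with huggingface_hub's InferenceClient. The generate_with_fallback helper, the prompt argument, and the max_new_tokens value are illustrative assumptions, not code from this commit.

# Illustrative sketch only (assumed helper, not part of this repository):
# try MODEL first, then each entry of FALLBACK_MODELS, using
# huggingface_hub.InferenceClient for text generation.
from os import getenv
from huggingface_hub import InferenceClient

HF_TOKEN = getenv("HF_TOKEN")
MODEL = "google/gemma-1.1-7b-it"
FALLBACK_MODELS = [
    "mistralai/Mixtral-8x7B-Instruct-v0.1",
    "mistralai/Mistral-7B-Instruct-v0.2",
    "mistralai/Mistral-7B-Instruct-v0.1",
]

def generate_with_fallback(prompt: str) -> str:
    # Try the primary model, then each fallback in order; raise if all fail.
    last_error = None
    for model_id in [MODEL, *FALLBACK_MODELS]:
        try:
            client = InferenceClient(model=model_id, token=HF_TOKEN)
            return client.text_generation(prompt, max_new_tokens=512)
        except Exception as err:
            last_error = err
    raise RuntimeError("All models failed") from last_error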