Mbonea committed on
Commit 275b80a
1 Parent(s): 444a3b4

model selector

App/Generate/Schema.py CHANGED
@@ -6,6 +6,7 @@ from pydantic import validator
 class GeneratorRequest(BaseModel):
     prompt: str
     grok: Optional[bool] = True
+    model: str = "llama3-70b-8192"
 
 
 class GeneratorBulkRequest(BaseModel):
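The request schema now exposes the model name alongside prompt and grok, defaulting to the value that was previously hard-coded in GroqInstruct.py. A minimal sketch of the resulting model, assuming the Optional and BaseModel imports already present in Schema.py:

# Sketch of GeneratorRequest after this commit; the imports are assumed to
# match what Schema.py already uses (typing.Optional, pydantic.BaseModel).
from typing import Optional

from pydantic import BaseModel


class GeneratorRequest(BaseModel):
    prompt: str
    grok: Optional[bool] = True
    model: str = "llama3-70b-8192"  # new field: lets the caller pick the Groq model


# Callers can now choose a model per request, e.g. one of the alternatives
# listed as comments in GroqInstruct.py:
req = GeneratorRequest(prompt="a short video about coral reefs", model="gemma-7b-it")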
App/Generate/generatorRoutes.py CHANGED
@@ -22,7 +22,7 @@ async def main(request: GeneratorRequest):
     renderr = RenderVideo()
     huggChat = Hugging()
     if request.grok:
-        message = chatbot(Prompt.format(topic=topic))
+        message = chatbot(Prompt.format(topic=topic), model=request.model)
 
     else:
         temp = await huggChat.chat(
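With the schema change above, the route handler simply forwards request.model into chatbot on the Groq branch; the Hugging Chat branch is unchanged. A hedged example of what a client call could look like; the /generate path and the use of httpx are assumptions for illustration, only the field names come from GeneratorRequest:

# Hypothetical client call: the endpoint URL is an assumption; the JSON body
# mirrors the GeneratorRequest fields (prompt, grok, model).
import httpx

payload = {
    "prompt": "space exploration",
    "grok": True,                 # route the prompt through the Groq chatbot
    "model": "llama3-70b-8192",   # now selectable per request
}
resp = httpx.post("http://localhost:8000/generate", json=payload, timeout=120)
print(resp.status_code)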
App/Generate/utils/GroqInstruct.py CHANGED
@@ -27,10 +27,10 @@ client = instructor.from_groq(client, mode=instructor.Mode.JSON)
 # rather than having to use the OpenAISchema class
 
 
-def chatbot(prompt):
+def chatbot(prompt: str, model: str = "llama3-70b-8192"):
 
     response: VideoOutput = client.chat.completions.create(
-        model="llama3-70b-8192",
+        model=model,
         # model="gemma-7b-it",
         # model="llama2-70b-4096",
         # model="llama3-70b-8192",