vilarin committed on
Commit
3569c20
1 Parent(s): f01a45c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +9 -7
app.py CHANGED
@@ -52,7 +52,7 @@ h3 {
52
  text-align: center;
53
  }
54
  """
55
- def stream_chat(message: str, history: list, temperature: float, context_window: int, top_p: float, top_k: int, penalty: float):
56
  print(f'message is - {message}')
57
  print(f'history is - {history}')
58
  conversation = []
@@ -69,11 +69,13 @@ def stream_chat(message: str, history: list, temperature: float, context_window:
69
  model="gemma2",
70
  messages=conversation,
71
  stream=True,
72
- temperature=temperature,
73
- top_k=top_k,
74
- top_p=top_p,
75
- num_ctx=context_window,
76
- repeat_penalty=penalty,
 
 
77
  )
78
 
79
  buffer = ""
@@ -108,7 +110,7 @@ with gr.Blocks(css=CSS, theme="soft") as demo:
108
  maximum=2048,
109
  step=1,
110
  value=1024,
111
- label="Context window",
112
  render=False,
113
  ),
114
  gr.Slider(
 
52
  text-align: center;
53
  }
54
  """
55
+ def stream_chat(message: str, history: list, temperature: float, max_new_tokens: int, top_p: float, top_k: int, penalty: float):
56
  print(f'message is - {message}')
57
  print(f'history is - {history}')
58
  conversation = []
 
69
  model="gemma2",
70
  messages=conversation,
71
  stream=True,
72
+ options={
73
+ 'num_predict': max_new_tokens,
74
+ 'temperature': temperature,
75
+ 'top_p': top_p,
76
+ 'top_k': top_k,
77
+ 'repeat_penalty': penalty,
78
+ },
79
  )
80
 
81
  buffer = ""
 
110
  maximum=2048,
111
  step=1,
112
  value=1024,
113
+ label="Max New Tokens",
114
  render=False,
115
  ),
116
  gr.Slider(