Spaces:
Runtime error
Runtime error
Enable queue
Browse files
app.py
CHANGED
@@ -4,7 +4,7 @@ from threading import Thread
|
|
4 |
import gradio as gr
|
5 |
import torch
|
6 |
from transformers import (AutoModelForCausalLM, AutoTokenizer,
|
7 |
-
|
8 |
|
9 |
theme = gr.themes.Monochrome(
|
10 |
primary_hue="indigo",
|
@@ -82,7 +82,7 @@ with gr.Blocks(theme=theme) as demo:
|
|
82 |
gr.Markdown(
|
83 |
"""<h1><center>π¦π¦π¦ StackLLaMa π¦π¦π¦</center></h1>
|
84 |
|
85 |
-
StackLLaMa is a 7 billion parameter language model that has been trained on pairs of programming questions and answers from [Stack Overflow](https://stackoverflow.com) using Reinforcement Learning from Human Feedback
|
86 |
|
87 |
Type in the box below and click the button to generate answers to your most pressing coding questions 🔥!
|
88 |
"""
|
@@ -149,5 +149,5 @@ with gr.Blocks(theme=theme) as demo:
|
|
149 |
submit.click(generate, inputs=[instruction, temperature, max_new_tokens, top_p, top_k], outputs=[output])
|
150 |
instruction.submit(generate, inputs=[instruction, temperature, max_new_tokens, top_p, top_k], outputs=[output])
|
151 |
|
152 |
-
demo.queue()
|
153 |
-
demo.launch()
|
|
|
4 |
import gradio as gr
|
5 |
import torch
|
6 |
from transformers import (AutoModelForCausalLM, AutoTokenizer,
|
7 |
+
TextIteratorStreamer)
|
8 |
|
9 |
theme = gr.themes.Monochrome(
|
10 |
primary_hue="indigo",
|
|
|
82 |
gr.Markdown(
|
83 |
"""<h1><center>π¦π¦π¦ StackLLaMa π¦π¦π¦</center></h1>
|
84 |
|
85 |
+
StackLLaMa is a 7 billion parameter language model that has been trained on pairs of programming questions and answers from [Stack Overflow](https://stackoverflow.com) using Reinforcement Learning from Human Feedback with the [TRL library](https://github.com/lvwerra/trl). For more details, check out our blog post [ADD LINK].
|
86 |
|
87 |
Type in the box below and click the button to generate answers to your most pressing coding questions 🔥!
|
88 |
"""
|
|
|
149 |
submit.click(generate, inputs=[instruction, temperature, max_new_tokens, top_p, top_k], outputs=[output])
|
150 |
instruction.submit(generate, inputs=[instruction, temperature, max_new_tokens, top_p, top_k], outputs=[output])
|
151 |
|
152 |
+
demo.queue(concurrency_count=1)
|
153 |
+
demo.launch(enable_queue=True)
|